App Engine Python SDK version 1.8.3

git-svn-id: http://googleappengine.googlecode.com/svn/trunk/python@375 80f5ef21-4148-0410-bacc-cfb02402ada8
diff --git a/RELEASE_NOTES b/RELEASE_NOTES
index dbdf9a3..38d5184 100644
--- a/RELEASE_NOTES
+++ b/RELEASE_NOTES
@@ -3,6 +3,77 @@
 
 App Engine SDK - Release Notes
 
+Version 1.8.3
+
+Python
+===============================
+- Published a major rewrite of the Search API documentation. Please see:
+    https://developers.google.com/appengine/docs/python/search/
+- Calls to the Task Queue REST API no longer require prefixing the project
+  name with "s~".
+- Fixed an issue in the Mail API: email addresses that contain encoded
+  newlines, as specified in RFC 2047, are now parsed correctly.
+- Fixed an issue with channels.send_message failing when a client id has 3 or
+  more dashes.
+- Fixed an issue with ndb.non_transactional not correctly restoring the db
+  library's transactional state.
+    https://code.google.com/p/appengine-ndb-experiment/issues/detail?id=232
+- Fixed an issue with NDB not raising the correct exception when the rollback
+  itself raises an exception.
+    https://code.google.com/p/appengine-ndb-experiment/issues/detail?id=234
+- The NDB model property settings 'default' and 'required' are no longer
+  mutually exclusive.
+    https://code.google.com/p/appengine-ndb-experiment/issues/detail?id=236
+- Fixed an issue with the debugger and breakpoints not working on the
+  dev_appserver.
+    http://code.google.com/p/appengine-devappserver2-experiment/issues/detail?id=28
+    http://code.google.com/p/googleappengine/issues/detail?id=9012
+    http://code.google.com/p/googleappengine/issues/detail?id=9027
+- Fixed an issue with NDB queries not supporting iteration over distinct
+  queries.
+    http://code.google.com/p/googleappengine/issues/detail?id=9591
+- Fixed an issue with enabling cloud integration for existing apps.
+    https://code.google.com/p/googleappengine/issues/detail?id=9602
+- Fixed an issue with users not being able to view the cron jobs page in the
+  dev_appserver.
+    https://code.google.com/p/googleappengine/issues/detail?id=9665
+- Fixed an issue with the Channel API send_message function not working on the
+  dev_appserver.
+    https://code.google.com/p/googleappengine/issues/detail?id=9675
+- Fixed an issue with the Modules API get_current_module_name() function
+  incorrectly reporting 'default' when the current module is not the default.
+    https://code.google.com/p/googleappengine/issues/detail?id=9687
+- Fixed an issue with the Datastore Admin UI failing to load due to the app
+  having too many kinds.
+    https://code.google.com/p/googleappengine/issues/detail?id=9749
+- Fixed an issue with the Channel API Testbed not working correctly after
+  upgrading to SDK 1.8.2.
+    https://code.google.com/p/googleappengine/issues/detail?id=9757
+
+PHP
+===============================
+- Users who are whitelisted to use PHP are now able to deploy PHP to any of
+  their apps.
+- There is now better support for stat()-related functions for Google Cloud
+  Storage objects.
+- Added a read-through cache to the GCS client to improve performance. This
+  allows for optimistic caching for write-once, read-many applications and
+  performs on-write invalidation.
+- Environment variables that are set in app.yaml are now available in the PHP
+  runtime.
+- You can now pass an array of metadata as part of the Google Cloud Storage
+  upload context.
+- Apps can now use opendir()/readdir()/rewinddir()/closedir() on Google Cloud
+  Storage buckets.
+- The PHP Task Queue API now supports adding tasks in bulk via the addTasks()
+  method on the new PushQueue class.
+- The PHP Task Queue API now supports setting headers on the PushTask class.
+- Calls to the Task Queue REST API no longer require prefixing the project
+  name with "s~".
+- Fixed an issue in the Mail API: email addresses that contain encoded
+  newlines, as specified in RFC 2047, are now parsed correctly.
+- Fixed an issue with enabling cloud integration for existing apps.
+    https://code.google.com/p/googleappengine/issues/detail?id=9602
+
 Version 1.8.2
 
 Python
diff --git a/VERSION b/VERSION
index cd8cafa..be8ffd7 100644
--- a/VERSION
+++ b/VERSION
@@ -1,5 +1,5 @@
-release: "1.8.2"
-timestamp: 1370993282
+release: "1.8.3"
+timestamp: 1374030712
 api_versions: ['1']
 supported_api_versions:
   python:
diff --git a/_php_runtime.py b/_php_runtime.py
index dd75b28..1c48e39 100644
--- a/_php_runtime.py
+++ b/_php_runtime.py
@@ -114,6 +114,8 @@
     ]
 
 _PHP_RUNTIME_PATHS = [
+    _DIR_PATH,
+
     os.path.join(_DIR_PATH, 'lib', 'concurrent'),
     os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
     os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
diff --git a/_python_runtime.py b/_python_runtime.py
index dd75b28..1c48e39 100644
--- a/_python_runtime.py
+++ b/_python_runtime.py
@@ -114,6 +114,8 @@
     ]
 
 _PHP_RUNTIME_PATHS = [
+    _DIR_PATH,
+
     os.path.join(_DIR_PATH, 'lib', 'concurrent'),
     os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
     os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
diff --git a/apiclient/__init__.py b/apiclient/__init__.py
new file mode 100644
index 0000000..4802e90
--- /dev/null
+++ b/apiclient/__init__.py
@@ -0,0 +1 @@
+__version__ = "1.0"
diff --git a/apiclient/discovery.py b/apiclient/discovery.py
new file mode 100644
index 0000000..19af41a
--- /dev/null
+++ b/apiclient/discovery.py
@@ -0,0 +1,953 @@
+# Copyright (C) 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for discovery based APIs.
+
+A client library for Google's discovery based APIs.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__all__ = [
+    'build',
+    'build_from_document',
+    'fix_method_name',
+    'key2param',
+    ]
+
+import copy
+import httplib2
+import keyword
+import logging
+import os
+import re
+import uritemplate
+import urllib
+import urlparse
+import mimeparse
+import mimetypes
+
+try:
+  from urlparse import parse_qsl
+except ImportError:
+  from cgi import parse_qsl
+
+from apiclient.errors import HttpError
+from apiclient.errors import InvalidJsonError
+from apiclient.errors import MediaUploadSizeError
+from apiclient.errors import UnacceptableMimeTypeError
+from apiclient.errors import UnknownApiNameOrVersion
+from apiclient.errors import UnknownFileType
+from apiclient.http import HttpRequest
+from apiclient.http import MediaFileUpload
+from apiclient.http import MediaUpload
+from apiclient.model import JsonModel
+from apiclient.model import MediaModel
+from apiclient.model import RawModel
+from apiclient.schema import Schemas
+from email.mime.multipart import MIMEMultipart
+from email.mime.nonmultipart import MIMENonMultipart
+from oauth2client.util import positional
+from oauth2client.util import _add_query_parameter
+from oauth2client.anyjson import simplejson
+
+# The client library requires a version of httplib2 that supports RETRIES.
+httplib2.RETRIES = 1
+
+logger = logging.getLogger(__name__)
+
+URITEMPLATE = re.compile('{[^}]*}')
+VARNAME = re.compile('[a-zA-Z0-9_-]+')
+DISCOVERY_URI = ('https://www.googleapis.com/discovery/v1/apis/'
+                 '{api}/{apiVersion}/rest')
+DEFAULT_METHOD_DOC = 'A description of how to use this function'
+HTTP_PAYLOAD_METHODS = frozenset(['PUT', 'POST', 'PATCH'])
+_MEDIA_SIZE_BIT_SHIFTS = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}
+BODY_PARAMETER_DEFAULT_VALUE = {
+    'description': 'The request body.',
+    'type': 'object',
+    'required': True,
+}
+MEDIA_BODY_PARAMETER_DEFAULT_VALUE = {
+  'description': ('The filename of the media request body, or an instance '
+                  'of a MediaUpload object.'),
+  'type': 'string',
+  'required': False,
+}
+
+# Parameters accepted by the stack, but not visible via discovery.
+# TODO(dhermes): Remove 'userip' in 'v2'.
+STACK_QUERY_PARAMETERS = frozenset(['trace', 'pp', 'userip', 'strict'])
+STACK_QUERY_PARAMETER_DEFAULT_VALUE = {'type': 'string', 'location': 'query'}
+
+# Library-specific reserved words beyond Python keywords.
+RESERVED_WORDS = frozenset(['body'])
+
+
+def fix_method_name(name):
+  """Fix method names to avoid reserved word conflicts.
+
+  Args:
+    name: string, method name.
+
+  Returns:
+    The name with a '_' prefixed if the name is a reserved word.
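+
+  For example (a sketch of expected behavior):
+
+    fix_method_name('import')  # -> 'import_' ('import' is a Python keyword)
+    fix_method_name('body')    # -> 'body_' ('body' is library-reserved)
+    fix_method_name('list')    # -> 'list' (no conflict, unchanged)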
+  """
+  if keyword.iskeyword(name) or name in RESERVED_WORDS:
+    return name + '_'
+  else:
+    return name
+
+
+def key2param(key):
+  """Converts key names into parameter names.
+
+  For example, converting "max-results" -> "max_results"
+
+  Args:
+    key: string, the method key name.
+
+  Returns:
+    A safe method name based on the key name.
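+
+  A further sketch of expected behavior: key2param('123') -> 'x123', since an
+  'x' is prepended when the first character is not alphabetic.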
+  """
+  result = []
+  key = list(key)
+  if not key[0].isalpha():
+    result.append('x')
+  for c in key:
+    if c.isalnum():
+      result.append(c)
+    else:
+      result.append('_')
+
+  return ''.join(result)
+
+
+@positional(2)
+def build(serviceName,
+          version,
+          http=None,
+          discoveryServiceUrl=DISCOVERY_URI,
+          developerKey=None,
+          model=None,
+          requestBuilder=HttpRequest):
+  """Construct a Resource for interacting with an API.
+
+  Construct a Resource object for interacting with an API. The serviceName and
+  version are the names from the Discovery service.
+
+  Args:
+    serviceName: string, name of the service.
+    version: string, the version of the service.
+    http: httplib2.Http, An instance of httplib2.Http or something that acts
+      like it that HTTP requests will be made through.
+    discoveryServiceUrl: string, a URI Template that points to the location of
+      the discovery service. It should have two parameters {api} and
+      {apiVersion} that when filled in produce an absolute URI to the discovery
+      document for that service.
+    developerKey: string, key obtained from
+      https://code.google.com/apis/console.
+    model: apiclient.Model, converts to and from the wire format.
+    requestBuilder: apiclient.http.HttpRequest, encapsulator for an HTTP
+      request.
+
+  Returns:
+    A Resource object with methods for interacting with the service.
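+
+  Example (a minimal sketch; assumes network access to the discovery service
+  and uses the Books API purely for illustration):
+
+    service = build('books', 'v1', http=httplib2.Http())
+    response = service.volumes().list(q='android').execute()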
+  """
+  params = {
+      'api': serviceName,
+      'apiVersion': version
+      }
+
+  if http is None:
+    http = httplib2.Http()
+
+  requested_url = uritemplate.expand(discoveryServiceUrl, params)
+
+  # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
+  # variable that contains the network address of the client sending the
+  # request. If it exists then add that to the request for the discovery
+  # document to avoid exceeding the quota on discovery requests.
+  if 'REMOTE_ADDR' in os.environ:
+    requested_url = _add_query_parameter(requested_url, 'userIp',
+                                         os.environ['REMOTE_ADDR'])
+  logger.info('URL being requested: %s' % requested_url)
+
+  resp, content = http.request(requested_url)
+
+  if resp.status == 404:
+    raise UnknownApiNameOrVersion("name: %s  version: %s" % (serviceName,
+                                                            version))
+  if resp.status >= 400:
+    raise HttpError(resp, content, uri=requested_url)
+
+  try:
+    service = simplejson.loads(content)
+  except ValueError, e:
+    logger.error('Failed to parse as JSON: ' + content)
+    raise InvalidJsonError()
+
+  return build_from_document(content, base=discoveryServiceUrl, http=http,
+      developerKey=developerKey, model=model, requestBuilder=requestBuilder)
+
+
+@positional(1)
+def build_from_document(
+    service,
+    base=None,
+    future=None,
+    http=None,
+    developerKey=None,
+    model=None,
+    requestBuilder=HttpRequest):
+  """Create a Resource for interacting with an API.
+
+  Same as `build()`, but constructs the Resource object from a discovery
+  document that it is given, as opposed to retrieving one over HTTP.
+
+  Args:
+    service: string or object, the JSON discovery document describing the API.
+      The value passed in may either be the JSON string or the deserialized
+      JSON.
+    base: string, base URI for all HTTP requests, usually the discovery URI.
+      This parameter is no longer used as rootUrl and servicePath are included
+      within the discovery document. (deprecated)
+    future: string, discovery document with future capabilities (deprecated).
+    http: httplib2.Http, An instance of httplib2.Http or something that acts
+      like it that HTTP requests will be made through.
+    developerKey: string, Key for controlling API usage, generated
+      from the API Console.
+    model: Model class instance that serializes and de-serializes requests and
+      responses.
+    requestBuilder: Takes an http request and packages it up to be executed.
+
+  Returns:
+    A Resource object with methods for interacting with the service.
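+
+  Example (a minimal sketch; assumes discovery_doc already holds a discovery
+  document as a JSON string, fetched by some other means):
+
+    service = build_from_document(discovery_doc, http=httplib2.Http())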
+  """
+
+  # future is no longer used.
+  future = {}
+
+  if isinstance(service, basestring):
+    service = simplejson.loads(service)
+  base = urlparse.urljoin(service['rootUrl'], service['servicePath'])
+  schema = Schemas(service)
+
+  if model is None:
+    features = service.get('features', [])
+    model = JsonModel('dataWrapper' in features)
+  return Resource(http=http, baseUrl=base, model=model,
+                  developerKey=developerKey, requestBuilder=requestBuilder,
+                  resourceDesc=service, rootDesc=service, schema=schema)
+
+
+def _cast(value, schema_type):
+  """Convert value to a string based on JSON Schema type.
+
+  See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
+  JSON Schema.
+
+  Args:
+    value: any, the value to convert
+    schema_type: string, the type that value should be interpreted as
+
+  Returns:
+    A string representation of 'value' based on the schema_type.
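+
+  For example: _cast(10, 'string') -> '10', _cast(True, 'boolean') -> 'true',
+  and _cast('7', 'number') -> '7.0'.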
+  """
+  if schema_type == 'string':
+    if type(value) == type('') or type(value) == type(u''):
+      return value
+    else:
+      return str(value)
+  elif schema_type == 'integer':
+    return str(int(value))
+  elif schema_type == 'number':
+    return str(float(value))
+  elif schema_type == 'boolean':
+    return str(bool(value)).lower()
+  else:
+    if type(value) == type('') or type(value) == type(u''):
+      return value
+    else:
+      return str(value)
+
+
+def _media_size_to_long(maxSize):
+  """Convert a string media size, such as 10GB or 3TB into an integer.
+
+  Args:
+    maxSize: string, size as a string, such as 2MB or 7GB.
+
+  Returns:
+    The size as an integer value.
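+
+  For example: _media_size_to_long('10GB') -> 10 * 2**30, while a bare
+  number such as '1000' is returned unshifted as 1000L.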
+  """
+  if len(maxSize) < 2:
+    return 0L
+  units = maxSize[-2:].upper()
+  bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
+  if bit_shift is not None:
+    return long(maxSize[:-2]) << bit_shift
+  else:
+    return long(maxSize)
+
+
+def _media_path_url_from_info(root_desc, path_url):
+  """Creates an absolute media path URL.
+
+  Constructed using the API root URI and service path from the discovery
+  document and the relative path for the API method.
+
+  Args:
+    root_desc: Dictionary; the entire original deserialized discovery document.
+    path_url: String; the relative URL for the API method. Relative to the API
+        root, which is specified in the discovery document.
+
+  Returns:
+    String; the absolute URI for media upload for the API method.
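+
+  For example (an illustrative sketch, not taken from a real discovery
+  document): with rootUrl 'https://www.googleapis.com/', servicePath
+  'storage/v1/' and path_url 'b/bucket/o', this returns
+  'https://www.googleapis.com/upload/storage/v1/b/bucket/o'.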
+  """
+  return '%(root)supload/%(service_path)s%(path)s' % {
+      'root': root_desc['rootUrl'],
+      'service_path': root_desc['servicePath'],
+      'path': path_url,
+  }
+
+
+def _fix_up_parameters(method_desc, root_desc, http_method):
+  """Updates parameters of an API method with values specific to this library.
+
+  Specifically, adds whatever global parameters are specified by the API to the
+  parameters for the individual method. Also adds parameters which don't
+  appear in the discovery document, but are available to all discovery based
+  APIs (these are listed in STACK_QUERY_PARAMETERS).
+
+  SIDE EFFECTS: This updates the parameters dictionary object in the method
+  description.
+
+  Args:
+    method_desc: Dictionary with metadata describing an API method. Value comes
+        from the dictionary of methods stored in the 'methods' key in the
+        deserialized discovery document.
+    root_desc: Dictionary; the entire original deserialized discovery document.
+    http_method: String; the HTTP method used to call the API method described
+        in method_desc.
+
+  Returns:
+    The updated Dictionary stored in the 'parameters' key of the method
+        description dictionary.
+  """
+  parameters = method_desc.setdefault('parameters', {})
+
+  # Add in the parameters common to all methods.
+  for name, description in root_desc.get('parameters', {}).iteritems():
+    parameters[name] = description
+
+  # Add in undocumented query parameters.
+  for name in STACK_QUERY_PARAMETERS:
+    parameters[name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy()
+
+  # Add 'body' (our own reserved word) to parameters if the method supports
+  # a request payload.
+  if http_method in HTTP_PAYLOAD_METHODS and 'request' in method_desc:
+    body = BODY_PARAMETER_DEFAULT_VALUE.copy()
+    body.update(method_desc['request'])
+    parameters['body'] = body
+
+  return parameters
+
+
+def _fix_up_media_upload(method_desc, root_desc, path_url, parameters):
+  """Updates parameters of API by adding 'media_body' if supported by method.
+
+  SIDE EFFECTS: If the method supports media upload and has a required body,
+  sets body to be optional (required=False) instead. Also, if there is a
+  'mediaUpload' in the method description, adds 'media_upload' key to
+  parameters.
+
+  Args:
+    method_desc: Dictionary with metadata describing an API method. Value comes
+        from the dictionary of methods stored in the 'methods' key in the
+        deserialized discovery document.
+    root_desc: Dictionary; the entire original deserialized discovery document.
+    path_url: String; the relative URL for the API method. Relative to the API
+        root, which is specified in the discovery document.
+    parameters: A dictionary describing method parameters for method described
+        in method_desc.
+
+  Returns:
+    Triple (accept, max_size, media_path_url) where:
+      - accept is a list of strings representing what content types are
+        accepted for media upload. Defaults to empty list if not in the
+        discovery document.
+      - max_size is a long representing the max size in bytes allowed for a
+        media upload. Defaults to 0L if not in the discovery document.
+      - media_path_url is a String; the absolute URI for media upload for the
+        API method. Constructed using the API root URI and service path from
+        the discovery document and the relative path for the API method. If
+        media upload is not supported, this is None.
+  """
+  media_upload = method_desc.get('mediaUpload', {})
+  accept = media_upload.get('accept', [])
+  max_size = _media_size_to_long(media_upload.get('maxSize', ''))
+  media_path_url = None
+
+  if media_upload:
+    media_path_url = _media_path_url_from_info(root_desc, path_url)
+    parameters['media_body'] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy()
+    if 'body' in parameters:
+      parameters['body']['required'] = False
+
+  return accept, max_size, media_path_url
+
+
+def _fix_up_method_description(method_desc, root_desc):
+  """Updates a method description in a discovery document.
+
+  SIDE EFFECTS: Changes the parameters dictionary in the method description with
+  extra parameters which are used locally.
+
+  Args:
+    method_desc: Dictionary with metadata describing an API method. Value comes
+        from the dictionary of methods stored in the 'methods' key in the
+        deserialized discovery document.
+    root_desc: Dictionary; the entire original deserialized discovery document.
+
+  Returns:
+    Tuple (path_url, http_method, method_id, accept, max_size, media_path_url)
+    where:
+      - path_url is a String; the relative URL for the API method. Relative to
+        the API root, which is specified in the discovery document.
+      - http_method is a String; the HTTP method used to call the API method
+        described in the method description.
+      - method_id is a String; the name of the RPC method associated with the
+        API method, and is in the method description in the 'id' key.
+      - accept is a list of strings representing what content types are
+        accepted for media upload. Defaults to empty list if not in the
+        discovery document.
+      - max_size is a long representing the max size in bytes allowed for a
+        media upload. Defaults to 0L if not in the discovery document.
+      - media_path_url is a String; the absolute URI for media upload for the
+        API method. Constructed using the API root URI and service path from
+        the discovery document and the relative path for the API method. If
+        media upload is not supported, this is None.
+  """
+  path_url = method_desc['path']
+  http_method = method_desc['httpMethod']
+  method_id = method_desc['id']
+
+  parameters = _fix_up_parameters(method_desc, root_desc, http_method)
+  # Order is important. `_fix_up_media_upload` needs `method_desc` to have a
+  # 'parameters' key and needs to know if there is a 'body' parameter because it
+  # also sets a 'media_body' parameter.
+  accept, max_size, media_path_url = _fix_up_media_upload(
+      method_desc, root_desc, path_url, parameters)
+
+  return path_url, http_method, method_id, accept, max_size, media_path_url
+
+
+# TODO(dhermes): Convert this class to ResourceMethod and make it callable
+class ResourceMethodParameters(object):
+  """Represents the parameters associated with a method.
+
+  Attributes:
+    argmap: Map from method parameter name (string) to query parameter name
+        (string).
+    required_params: List of required parameters (represented by parameter
+        name as string).
+    repeated_params: List of repeated parameters (represented by parameter
+        name as string).
+    pattern_params: Map from method parameter name (string) to regular
+        expression (as a string). If the pattern is set for a parameter, the
+        value for that parameter must match the regular expression.
+    query_params: List of parameters (represented by parameter name as string)
+        that will be used in the query string.
+    path_params: Set of parameters (represented by parameter name as string)
+        that will be used in the base URL path.
+    param_types: Map from method parameter name (string) to parameter type. Type
+        can be any valid JSON schema type; valid values are 'any', 'array',
+        'boolean', 'integer', 'number', 'object', or 'string'. Reference:
+        http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.1
+    enum_params: Map from method parameter name (string) to list of strings,
+       where each list of strings is the list of acceptable enum values.
+  """
+
+  def __init__(self, method_desc):
+    """Constructor for ResourceMethodParameters.
+
+    Sets default values and defers to set_parameters to populate.
+
+    Args:
+      method_desc: Dictionary with metadata describing an API method. Value
+          comes from the dictionary of methods stored in the 'methods' key in
+          the deserialized discovery document.
+    """
+    self.argmap = {}
+    self.required_params = []
+    self.repeated_params = []
+    self.pattern_params = {}
+    self.query_params = []
+    # TODO(dhermes): Change path_params to a list if the extra URITEMPLATE
+    #                parsing is gotten rid of.
+    self.path_params = set()
+    self.param_types = {}
+    self.enum_params = {}
+
+    self.set_parameters(method_desc)
+
+  def set_parameters(self, method_desc):
+    """Populates maps and lists based on method description.
+
+    Iterates through each parameter for the method and parses the values from
+    the parameter dictionary.
+
+    Args:
+      method_desc: Dictionary with metadata describing an API method. Value
+          comes from the dictionary of methods stored in the 'methods' key in
+          the deserialized discovery document.
+    """
+    for arg, desc in method_desc.get('parameters', {}).iteritems():
+      param = key2param(arg)
+      self.argmap[param] = arg
+
+      if desc.get('pattern'):
+        self.pattern_params[param] = desc['pattern']
+      if desc.get('enum'):
+        self.enum_params[param] = desc['enum']
+      if desc.get('required'):
+        self.required_params.append(param)
+      if desc.get('repeated'):
+        self.repeated_params.append(param)
+      if desc.get('location') == 'query':
+        self.query_params.append(param)
+      if desc.get('location') == 'path':
+        self.path_params.add(param)
+      self.param_types[param] = desc.get('type', 'string')
+
+    # TODO(dhermes): Determine if this is still necessary. Discovery based APIs
+    #                should have all path parameters already marked with
+    #                'location: path'.
+    for match in URITEMPLATE.finditer(method_desc['path']):
+      for namematch in VARNAME.finditer(match.group(0)):
+        name = key2param(namematch.group(0))
+        self.path_params.add(name)
+        if name in self.query_params:
+          self.query_params.remove(name)
+
+
+def createMethod(methodName, methodDesc, rootDesc, schema):
+  """Creates a method for attaching to a Resource.
+
+  Args:
+    methodName: string, name of the method to use.
+    methodDesc: object, fragment of deserialized discovery document that
+      describes the method.
+    rootDesc: object, the entire deserialized discovery document.
+    schema: object, mapping of schema names to schema descriptions.
+  """
+  methodName = fix_method_name(methodName)
+  (pathUrl, httpMethod, methodId, accept,
+   maxSize, mediaPathUrl) = _fix_up_method_description(methodDesc, rootDesc)
+
+  parameters = ResourceMethodParameters(methodDesc)
+
+  def method(self, **kwargs):
+    # Don't bother with a doc string; it will be overwritten by createMethod.
+
+    for name in kwargs.iterkeys():
+      if name not in parameters.argmap:
+        raise TypeError('Got an unexpected keyword argument "%s"' % name)
+
+    # Remove args that have a value of None.
+    keys = kwargs.keys()
+    for name in keys:
+      if kwargs[name] is None:
+        del kwargs[name]
+
+    for name in parameters.required_params:
+      if name not in kwargs:
+        raise TypeError('Missing required parameter "%s"' % name)
+
+    for name, regex in parameters.pattern_params.iteritems():
+      if name in kwargs:
+        if isinstance(kwargs[name], basestring):
+          pvalues = [kwargs[name]]
+        else:
+          pvalues = kwargs[name]
+        for pvalue in pvalues:
+          if re.match(regex, pvalue) is None:
+            raise TypeError(
+                'Parameter "%s" value "%s" does not match the pattern "%s"' %
+                (name, pvalue, regex))
+
+    for name, enums in parameters.enum_params.iteritems():
+      if name in kwargs:
+        # We need to handle the case of a repeated enum
+        # name differently, since we want to handle both
+        # arg='value' and arg=['value1', 'value2']
+        if (name in parameters.repeated_params and
+            not isinstance(kwargs[name], basestring)):
+          values = kwargs[name]
+        else:
+          values = [kwargs[name]]
+        for value in values:
+          if value not in enums:
+            raise TypeError(
+                'Parameter "%s" value "%s" is not an allowed value in "%s"' %
+                (name, value, str(enums)))
+
+    actual_query_params = {}
+    actual_path_params = {}
+    for key, value in kwargs.iteritems():
+      to_type = parameters.param_types.get(key, 'string')
+      # For repeated parameters we cast each member of the list.
+      if key in parameters.repeated_params and type(value) == type([]):
+        cast_value = [_cast(x, to_type) for x in value]
+      else:
+        cast_value = _cast(value, to_type)
+      if key in parameters.query_params:
+        actual_query_params[parameters.argmap[key]] = cast_value
+      if key in parameters.path_params:
+        actual_path_params[parameters.argmap[key]] = cast_value
+    body_value = kwargs.get('body', None)
+    media_filename = kwargs.get('media_body', None)
+
+    if self._developerKey:
+      actual_query_params['key'] = self._developerKey
+
+    model = self._model
+    if methodName.endswith('_media'):
+      model = MediaModel()
+    elif 'response' not in methodDesc:
+      model = RawModel()
+
+    headers = {}
+    headers, params, query, body = model.request(headers,
+        actual_path_params, actual_query_params, body_value)
+
+    expanded_url = uritemplate.expand(pathUrl, params)
+    url = urlparse.urljoin(self._baseUrl, expanded_url + query)
+
+    resumable = None
+    multipart_boundary = ''
+
+    if media_filename:
+      # Ensure we end up with a valid MediaUpload object.
+      if isinstance(media_filename, basestring):
+        (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
+        if media_mime_type is None:
+          raise UnknownFileType(media_filename)
+        if not mimeparse.best_match([media_mime_type], ','.join(accept)):
+          raise UnacceptableMimeTypeError(media_mime_type)
+        media_upload = MediaFileUpload(media_filename,
+                                       mimetype=media_mime_type)
+      elif isinstance(media_filename, MediaUpload):
+        media_upload = media_filename
+      else:
+        raise TypeError('media_filename must be str or MediaUpload.')
+
+      # Check the maxSize
+      if maxSize > 0 and media_upload.size() > maxSize:
+        raise MediaUploadSizeError("Media larger than: %s" % maxSize)
+
+      # Use the media path uri for media uploads
+      expanded_url = uritemplate.expand(mediaPathUrl, params)
+      url = urlparse.urljoin(self._baseUrl, expanded_url + query)
+      if media_upload.resumable():
+        url = _add_query_parameter(url, 'uploadType', 'resumable')
+
+      if media_upload.resumable():
+        # This is all we need to do for resumable, if the body exists it gets
+        # sent in the first request, otherwise an empty body is sent.
+        resumable = media_upload
+      else:
+        # A non-resumable upload
+        if body is None:
+          # This is a simple media upload
+          headers['content-type'] = media_upload.mimetype()
+          body = media_upload.getbytes(0, media_upload.size())
+          url = _add_query_parameter(url, 'uploadType', 'media')
+        else:
+          # This is a multipart/related upload.
+          msgRoot = MIMEMultipart('related')
+          # msgRoot should not write out its own headers
+          setattr(msgRoot, '_write_headers', lambda self: None)
+
+          # attach the body as one part
+          msg = MIMENonMultipart(*headers['content-type'].split('/'))
+          msg.set_payload(body)
+          msgRoot.attach(msg)
+
+          # attach the media as the second part
+          msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
+          msg['Content-Transfer-Encoding'] = 'binary'
+
+          payload = media_upload.getbytes(0, media_upload.size())
+          msg.set_payload(payload)
+          msgRoot.attach(msg)
+          body = msgRoot.as_string()
+
+          multipart_boundary = msgRoot.get_boundary()
+          headers['content-type'] = ('multipart/related; '
+                                     'boundary="%s"') % multipart_boundary
+          url = _add_query_parameter(url, 'uploadType', 'multipart')
+
+    logger.info('URL being requested: %s' % url)
+    return self._requestBuilder(self._http,
+                                model.response,
+                                url,
+                                method=httpMethod,
+                                body=body,
+                                headers=headers,
+                                methodId=methodId,
+                                resumable=resumable)
+
+  docs = [methodDesc.get('description', DEFAULT_METHOD_DOC), '\n\n']
+  if len(parameters.argmap) > 0:
+    docs.append('Args:\n')
+
+  # Skip undocumented params and params common to all methods.
+  skip_parameters = rootDesc.get('parameters', {}).keys()
+  skip_parameters.extend(STACK_QUERY_PARAMETERS)
+
+  all_args = parameters.argmap.keys()
+  args_ordered = [key2param(s) for s in methodDesc.get('parameterOrder', [])]
+
+  # Move body to the front of the line.
+  if 'body' in all_args:
+    args_ordered.append('body')
+
+  for name in all_args:
+    if name not in args_ordered:
+      args_ordered.append(name)
+
+  for arg in args_ordered:
+    if arg in skip_parameters:
+      continue
+
+    repeated = ''
+    if arg in parameters.repeated_params:
+      repeated = ' (repeated)'
+    required = ''
+    if arg in parameters.required_params:
+      required = ' (required)'
+    paramdesc = methodDesc['parameters'][parameters.argmap[arg]]
+    paramdoc = paramdesc.get('description', 'A parameter')
+    if '$ref' in paramdesc:
+      docs.append(
+          ('  %s: object, %s%s%s\n    The object takes the'
+          ' form of:\n\n%s\n\n') % (arg, paramdoc, required, repeated,
+            schema.prettyPrintByName(paramdesc['$ref'])))
+    else:
+      paramtype = paramdesc.get('type', 'string')
+      docs.append('  %s: %s, %s%s%s\n' % (arg, paramtype, paramdoc, required,
+                                          repeated))
+    enum = paramdesc.get('enum', [])
+    enumDesc = paramdesc.get('enumDescriptions', [])
+    if enum and enumDesc:
+      docs.append('    Allowed values\n')
+      for (name, desc) in zip(enum, enumDesc):
+        docs.append('      %s - %s\n' % (name, desc))
+  if 'response' in methodDesc:
+    if methodName.endswith('_media'):
+      docs.append('\nReturns:\n  The media object as a string.\n\n    ')
+    else:
+      docs.append('\nReturns:\n  An object of the form:\n\n    ')
+      docs.append(schema.prettyPrintSchema(methodDesc['response']))
+
+  setattr(method, '__doc__', ''.join(docs))
+  return (methodName, method)
+
+
+def createNextMethod(methodName):
+  """Creates any _next methods for attaching to a Resource.
+
+  The _next methods allow for easy iteration through list() responses.
+
+  Args:
+    methodName: string, name of the method to use.
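+
+  A sketch of the intended pagination pattern (collection and process are
+  hypothetical; a list_next method is generated whenever list() responses
+  carry a nextPageToken):
+
+    request = collection.list()
+    while request is not None:
+      response = request.execute()
+      process(response)  # hypothetical handler for one page of results
+      request = collection.list_next(request, response)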
+  """
+  methodName = fix_method_name(methodName)
+
+  def methodNext(self, previous_request, previous_response):
+    """Retrieves the next page of results.
+
+Args:
+  previous_request: The request for the previous page. (required)
+  previous_response: The response from the request for the previous page. (required)
+
+Returns:
+  A request object that you can call 'execute()' on to request the next
+  page. Returns None if there are no more items in the collection.
+    """
+    # Retrieve nextPageToken from previous_response
+    # Use as pageToken in previous_request to create new request.
+
+    if 'nextPageToken' not in previous_response:
+      return None
+
+    request = copy.copy(previous_request)
+
+    pageToken = previous_response['nextPageToken']
+    parsed = list(urlparse.urlparse(request.uri))
+    q = parse_qsl(parsed[4])
+
+    # Find and remove old 'pageToken' value from URI
+    newq = [(key, value) for (key, value) in q if key != 'pageToken']
+    newq.append(('pageToken', pageToken))
+    parsed[4] = urllib.urlencode(newq)
+    uri = urlparse.urlunparse(parsed)
+
+    request.uri = uri
+
+    logger.info('URL being requested: %s' % uri)
+
+    return request
+
+  return (methodName, methodNext)
+
+
+class Resource(object):
+  """A class for interacting with a resource."""
+
+  def __init__(self, http, baseUrl, model, requestBuilder, developerKey,
+               resourceDesc, rootDesc, schema):
+    """Build a Resource from the API description.
+
+    Args:
+      http: httplib2.Http, Object to make http requests with.
+      baseUrl: string, base URL for the API. All requests are relative to this
+          URI.
+      model: apiclient.Model, converts to and from the wire format.
+      requestBuilder: class or callable that instantiates an
+          apiclient.HttpRequest object.
+      developerKey: string, key obtained from
+          https://code.google.com/apis/console
+      resourceDesc: object, section of deserialized discovery document that
+          describes a resource. Note that the top level discovery document
+          is considered a resource.
+      rootDesc: object, the entire deserialized discovery document.
+      schema: object, mapping of schema names to schema descriptions.
+    """
+    self._dynamic_attrs = []
+
+    self._http = http
+    self._baseUrl = baseUrl
+    self._model = model
+    self._developerKey = developerKey
+    self._requestBuilder = requestBuilder
+    self._resourceDesc = resourceDesc
+    self._rootDesc = rootDesc
+    self._schema = schema
+
+    self._set_service_methods()
+
+  def _set_dynamic_attr(self, attr_name, value):
+    """Sets an instance attribute and tracks it in a list of dynamic attributes.
+
+    Args:
+      attr_name: string; The name of the attribute to be set
+      value: The value being set on the object and tracked in the dynamic cache.
+    """
+    self._dynamic_attrs.append(attr_name)
+    self.__dict__[attr_name] = value
+
+  def __getstate__(self):
+    """Trim the state down to something that can be pickled.
+
+    Uses the fact that the instance variable _dynamic_attrs holds attrs that
+    are dropped when pickling and recreated when unpickling.
+    """
+    state_dict = copy.copy(self.__dict__)
+    for dynamic_attr in self._dynamic_attrs:
+      del state_dict[dynamic_attr]
+    del state_dict['_dynamic_attrs']
+    return state_dict
+
+  def __setstate__(self, state):
+    """Reconstitute the state of the object from being pickled.
+
+    Uses the fact that the instance variable _dynamic_attrs holds attrs that
+    are dropped when pickling and recreated when unpickling.
+    """
+    self.__dict__.update(state)
+    self._dynamic_attrs = []
+    self._set_service_methods()
+
+  def _set_service_methods(self):
+    self._add_basic_methods(self._resourceDesc, self._rootDesc, self._schema)
+    self._add_nested_resources(self._resourceDesc, self._rootDesc, self._schema)
+    self._add_next_methods(self._resourceDesc, self._schema)
+
+  def _add_basic_methods(self, resourceDesc, rootDesc, schema):
+    # Add basic methods to Resource
+    if 'methods' in resourceDesc:
+      for methodName, methodDesc in resourceDesc['methods'].iteritems():
+        fixedMethodName, method = createMethod(
+            methodName, methodDesc, rootDesc, schema)
+        self._set_dynamic_attr(fixedMethodName,
+                               method.__get__(self, self.__class__))
+        # Add in _media methods. The functionality of the attached method will
+        # change when it sees that the method name ends in _media.
+        if methodDesc.get('supportsMediaDownload', False):
+          fixedMethodName, method = createMethod(
+              methodName + '_media', methodDesc, rootDesc, schema)
+          self._set_dynamic_attr(fixedMethodName,
+                                 method.__get__(self, self.__class__))
+
+  def _add_nested_resources(self, resourceDesc, rootDesc, schema):
+    # Add in nested resources
+    if 'resources' in resourceDesc:
+
+      def createResourceMethod(methodName, methodDesc):
+        """Create a method on the Resource to access a nested Resource.
+
+        Args:
+          methodName: string, name of the method to use.
+          methodDesc: object, fragment of deserialized discovery document that
+            describes the method.
+        """
+        methodName = fix_method_name(methodName)
+
+        def methodResource(self):
+          return Resource(http=self._http, baseUrl=self._baseUrl,
+                          model=self._model, developerKey=self._developerKey,
+                          requestBuilder=self._requestBuilder,
+                          resourceDesc=methodDesc, rootDesc=rootDesc,
+                          schema=schema)
+
+        setattr(methodResource, '__doc__', 'A collection resource.')
+        setattr(methodResource, '__is_resource__', True)
+
+        return (methodName, methodResource)
+
+      for methodName, methodDesc in resourceDesc['resources'].iteritems():
+        fixedMethodName, method = createResourceMethod(methodName, methodDesc)
+        self._set_dynamic_attr(fixedMethodName,
+                               method.__get__(self, self.__class__))
+
+  def _add_next_methods(self, resourceDesc, schema):
+    # Add _next() methods
+    # Look for response bodies in schema that contain nextPageToken, and methods
+    # that take a pageToken parameter.
+    if 'methods' in resourceDesc:
+      for methodName, methodDesc in resourceDesc['methods'].iteritems():
+        if 'response' in methodDesc:
+          responseSchema = methodDesc['response']
+          if '$ref' in responseSchema:
+            responseSchema = schema.get(responseSchema['$ref'])
+          hasNextPageToken = 'nextPageToken' in responseSchema.get('properties',
+                                                                   {})
+          hasPageToken = 'pageToken' in methodDesc.get('parameters', {})
+          if hasNextPageToken and hasPageToken:
+            fixedMethodName, method = createNextMethod(methodName + '_next')
+            self._set_dynamic_attr(fixedMethodName,
+                                   method.__get__(self, self.__class__))
diff --git a/apiclient/errors.py b/apiclient/errors.py
new file mode 100644
index 0000000..2bf9149
--- /dev/null
+++ b/apiclient/errors.py
@@ -0,0 +1,137 @@
+#!/usr/bin/python2.4
+#
+# Copyright (C) 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Errors for the library.
+
+All exceptions defined by the library
+should be defined in this file.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+
+from oauth2client import util
+from oauth2client.anyjson import simplejson
+
+
+class Error(Exception):
+  """Base error for this module."""
+  pass
+
+
+class HttpError(Error):
+  """HTTP data was invalid or unexpected."""
+
+  @util.positional(3)
+  def __init__(self, resp, content, uri=None):
+    self.resp = resp
+    self.content = content
+    self.uri = uri
+
+  def _get_reason(self):
+    """Calculate the reason for the error from the response content."""
+    reason = self.resp.reason
+    try:
+      data = simplejson.loads(self.content)
+      reason = data['error']['message']
+    except (ValueError, KeyError):
+      pass
+    if reason is None:
+      reason = ''
+    return reason
+
+  def __repr__(self):
+    if self.uri:
+      return '<HttpError %s when requesting %s returned "%s">' % (
+          self.resp.status, self.uri, self._get_reason().strip())
+    else:
+      return '<HttpError %s "%s">' % (self.resp.status, self._get_reason())
+
+  __str__ = __repr__
+
+
+class InvalidJsonError(Error):
+  """The JSON returned could not be parsed."""
+  pass
+
+
+class UnknownFileType(Error):
+  """File type unknown or unexpected."""
+  pass
+
+
+class UnknownLinkType(Error):
+  """Link type unknown or unexpected."""
+  pass
+
+
+class UnknownApiNameOrVersion(Error):
+  """No API with that name and version exists."""
+  pass
+
+
+class UnacceptableMimeTypeError(Error):
+  """That is an unacceptable mimetype for this operation."""
+  pass
+
+
+class MediaUploadSizeError(Error):
+  """Media is larger than the method can accept."""
+  pass
+
+
+class ResumableUploadError(HttpError):
+  """Error occured during resumable upload."""
+  pass
+
+
+class InvalidChunkSizeError(Error):
+  """The given chunksize is not valid."""
+  pass
+
+
+class BatchError(HttpError):
+  """Error occured during batch operations."""
+
+  @util.positional(2)
+  def __init__(self, reason, resp=None, content=None):
+    self.resp = resp
+    self.content = content
+    self.reason = reason
+
+  def __repr__(self):
+    return '<BatchError %s "%s">' % (self.resp.status, self.reason)
+
+  __str__ = __repr__
+
+
+class UnexpectedMethodError(Error):
+  """Exception raised by RequestMockBuilder on unexpected calls."""
+
+  @util.positional(1)
+  def __init__(self, methodId=None):
+    """Constructor for an UnexpectedMethodError."""
+    super(UnexpectedMethodError, self).__init__(
+        'Received unexpected call %s' % methodId)
+
+
+class UnexpectedBodyError(Error):
+  """Exception raised by RequestMockBuilder on unexpected bodies."""
+
+  def __init__(self, expected, provided):
+    """Constructor for an UnexpectedMethodError."""
+    super(UnexpectedBodyError, self).__init__(
+        'Expected: [%s] - Provided: [%s]' % (expected, provided))
diff --git a/apiclient/http.py b/apiclient/http.py
new file mode 100644
index 0000000..a956477
--- /dev/null
+++ b/apiclient/http.py
@@ -0,0 +1,1536 @@
+# Copyright (C) 2012 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes to encapsulate a single HTTP request.
+
+The classes implement a command pattern, with every
+object supporting an execute() method that does the
+actual HTTP request.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import StringIO
+import base64
+import copy
+import gzip
+import httplib2
+import mimeparse
+import mimetypes
+import os
+import sys
+import urllib
+import urlparse
+import uuid
+
+from email.generator import Generator
+from email.mime.multipart import MIMEMultipart
+from email.mime.nonmultipart import MIMENonMultipart
+from email.parser import FeedParser
+from errors import BatchError
+from errors import HttpError
+from errors import InvalidChunkSizeError
+from errors import ResumableUploadError
+from errors import UnexpectedBodyError
+from errors import UnexpectedMethodError
+from model import JsonModel
+from oauth2client import util
+from oauth2client.anyjson import simplejson
+
+
+DEFAULT_CHUNK_SIZE = 512*1024
+
+MAX_URI_LENGTH = 2048
+
+
+class MediaUploadProgress(object):
+  """Status of a resumable upload."""
+
+  def __init__(self, resumable_progress, total_size):
+    """Constructor.
+
+    Args:
+      resumable_progress: int, bytes sent so far.
+      total_size: int, total bytes in complete upload, or None if the total
+        upload size isn't known ahead of time.
+    """
+    self.resumable_progress = resumable_progress
+    self.total_size = total_size
+
+  def progress(self):
+    """Percent of upload completed, as a float.
+
+    Returns:
+      the percentage complete as a float, returning 0.0 if the total size of
+      the upload is unknown.
+    """
+    if self.total_size is not None:
+      return float(self.resumable_progress) / float(self.total_size)
+    else:
+      return 0.0
+
+
+class MediaDownloadProgress(object):
+  """Status of a resumable download."""
+
+  def __init__(self, resumable_progress, total_size):
+    """Constructor.
+
+    Args:
+      resumable_progress: int, bytes received so far.
+      total_size: int, total bytes in complete download.
+    """
+    self.resumable_progress = resumable_progress
+    self.total_size = total_size
+
+  def progress(self):
+    """Percent of download completed, as a float.
+
+    Returns:
+      the percentage complete as a float, returning 0.0 if the total size of
+      the download is unknown.
+    """
+    if self.total_size is not None:
+      return float(self.resumable_progress) / float(self.total_size)
+    else:
+      return 0.0
+
+
+class MediaUpload(object):
+  """Describes a media object to upload.
+
+  Base class that defines the interface of MediaUpload subclasses.
+
+  Note that subclasses of MediaUpload may allow you to control the chunksize
+  when uploading a media object. It is important to keep the size of the chunk
+  as large as possible to keep the upload efficient. Other factors may influence
+  the size of the chunk you use, particularly if you are working in an
+  environment where individual HTTP requests may have a hardcoded time limit,
+  such as under certain classes of requests under Google App Engine.
+
+  Streams are io.Base compatible objects that support seek(). Some MediaUpload
+  subclasses support using streams directly to upload data. A subclass
+  indicates support for streaming by returning True from has_stream(), and
+  where appropriate for the platform that stream is used to upload the media
+  object. The stream() method should return an io.Base object that supports
+  seek(). On platforms where the underlying httplib module supports streaming,
+  for example Python 2.6 and later, the stream will be passed into the http
+  library, which results in less memory being used and possibly faster uploads.
+
+  If you need to upload media that can't be uploaded using any of the existing
+  MediaUpload subclasses, you can subclass MediaUpload for your particular
+  needs.
+  """
+
+  def chunksize(self):
+    """Chunk size for resumable uploads.
+
+    Returns:
+      Chunk size in bytes.
+    """
+    raise NotImplementedError()
+
+  def mimetype(self):
+    """Mime type of the body.
+
+    Returns:
+      Mime type.
+    """
+    return 'application/octet-stream'
+
+  def size(self):
+    """Size of upload.
+
+    Returns:
+      Size of the body, or None if the size is unknown.
+    """
+    return None
+
+  def resumable(self):
+    """Whether this upload is resumable.
+
+    Returns:
+      True if resumable upload or False.
+    """
+    return False
+
+  def getbytes(self, begin, length):
+    """Get bytes from the media.
+
+    Args:
+      begin: int, offset from beginning of file.
+      length: int, number of bytes to read, starting at begin.
+
+    Returns:
+      A string of bytes read. May be shorter than length if EOF was reached
+      first.
+    """
+    raise NotImplementedError()
+
+  def has_stream(self):
+    """Does the underlying upload support a streaming interface.
+
+    Streaming means it is an io.IOBase subclass that supports seek, i.e.
+    seekable() returns True.
+
+    Returns:
+      True if the call to stream() will return an instance of a seekable
+      io.IOBase subclass.
+    """
+    return False
+
+  def stream(self):
+    """A stream interface to the data being uploaded.
+
+    Returns:
+      The returned value is an io.IOBase subclass that supports seek, i.e.
+      seekable() returns True.
+    """
+    raise NotImplementedError()
+
+  @util.positional(1)
+  def _to_json(self, strip=None):
+    """Utility function for creating a JSON representation of a MediaUpload.
+
+    Args:
+      strip: array, An array of names of members to not include in the JSON.
+
+    Returns:
+       string, a JSON representation of this instance, suitable to pass to
+       from_json().
+    """
+    t = type(self)
+    d = copy.copy(self.__dict__)
+    if strip is not None:
+      for member in strip:
+        del d[member]
+    d['_class'] = t.__name__
+    d['_module'] = t.__module__
+    return simplejson.dumps(d)
+
+  def to_json(self):
+    """Create a JSON representation of an instance of MediaUpload.
+
+    Returns:
+       string, a JSON representation of this instance, suitable to pass to
+       from_json().
+    """
+    return self._to_json()
+
+  @classmethod
+  def new_from_json(cls, s):
+    """Utility class method to instantiate a MediaUpload subclass from a JSON
+    representation produced by to_json().
+
+    Args:
+      s: string, JSON from to_json().
+
+    Returns:
+      An instance of the subclass of MediaUpload that was serialized with
+      to_json().
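+
+    For example, a round-trip sketch (assumes 'cow.png' exists on disk):
+
+      upload = MediaFileUpload('cow.png', mimetype='image/png')
+      restored = MediaUpload.new_from_json(upload.to_json())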
+    """
+    data = simplejson.loads(s)
+    # Find and call the right classmethod from_json() to restore the object.
+    module = data['_module']
+    m = __import__(module, fromlist=module.split('.')[:-1])
+    kls = getattr(m, data['_class'])
+    from_json = getattr(kls, 'from_json')
+    return from_json(s)
+
+
+class MediaIoBaseUpload(MediaUpload):
+  """A MediaUpload for a io.Base objects.
+
+  Note that the Python file object is compatible with io.Base and can be used
+  with this class also.
+
+    fh = io.BytesIO('...Some data to upload...')
+    media = MediaIoBaseUpload(fh, mimetype='image/png',
+      chunksize=1024*1024, resumable=True)
+    farm.animals().insert(
+        id='cow',
+        name='cow.png',
+        media_body=media).execute()
+
+  Depending on the platform you are working on, you may pass -1 as the
+  chunksize, which indicates that the entire file should be uploaded in a single
+  request. If the underlying platform supports streams, such as Python 2.6 or
+  later, then this can be very efficient as it avoids multiple connections, and
+  also avoids loading the entire file into memory before sending it. Note that
+  Google App Engine has a 5MB limit on request size, so you should never set
+  your chunksize larger than 5MB, or to -1.
+  """
+
+  @util.positional(3)
+  def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE,
+      resumable=False):
+    """Constructor.
+
+    Args:
+      fd: io.Base or file object, The source of the bytes to upload. MUST be
+        opened in blocking mode; do not use streams opened in non-blocking mode.
+        The given stream must be seekable, that is, it must be able to call
+        seek() on fd.
+      mimetype: string, Mime-type of the file.
+      chunksize: int, File will be uploaded in chunks of this many bytes. Only
+        used if resumable=True. Pass in a value of -1 if the file is to be
+        uploaded as a single chunk. Note that Google App Engine has a 5MB limit
+        on request size, so you should never set your chunksize larger than 5MB,
+        or to -1.
+      resumable: bool, True if this is a resumable upload. False means upload
+        in a single request.
+    """
+    super(MediaIoBaseUpload, self).__init__()
+    self._fd = fd
+    self._mimetype = mimetype
+    if not (chunksize == -1 or chunksize > 0):
+      raise InvalidChunkSizeError()
+    self._chunksize = chunksize
+    self._resumable = resumable
+
+    self._fd.seek(0, os.SEEK_END)
+    self._size = self._fd.tell()
+
+  def chunksize(self):
+    """Chunk size for resumable uploads.
+
+    Returns:
+      Chunk size in bytes.
+    """
+    return self._chunksize
+
+  def mimetype(self):
+    """Mime type of the body.
+
+    Returns:
+      Mime type.
+    """
+    return self._mimetype
+
+  def size(self):
+    """Size of upload.
+
+    Returns:
+      Size of the body, or None if the size is unknown.
+    """
+    return self._size
+
+  def resumable(self):
+    """Whether this upload is resumable.
+
+    Returns:
+      True if resumable upload or False.
+    """
+    return self._resumable
+
+  def getbytes(self, begin, length):
+    """Get bytes from the media.
+
+    Args:
+      begin: int, offset from beginning of file.
+      length: int, number of bytes to read, starting at begin.
+
+    Returns:
+      A string of bytes read. May be shorter than length if EOF was reached
+      first.
+    """
+    self._fd.seek(begin)
+    return self._fd.read(length)
+
+  def has_stream(self):
+    """Does the underlying upload support a streaming interface.
+
+    Streaming means it is an io.IOBase subclass that supports seek, i.e.
+    seekable() returns True.
+
+    Returns:
+      True if the call to stream() will return an instance of a seekable
+      io.IOBase subclass.
+    """
+    return True
+
+  def stream(self):
+    """A stream interface to the data being uploaded.
+
+    Returns:
+      The returned value is an io.IOBase subclass that supports seek, i.e.
+      seekable() returns True.
+    """
+    return self._fd
+
+  def to_json(self):
+    """This upload type is not serializable."""
+    raise NotImplementedError('MediaIoBaseUpload is not serializable.')
+
+
+class MediaFileUpload(MediaIoBaseUpload):
+  """A MediaUpload for a file.
+
+  Construct a MediaFileUpload and pass as the media_body parameter of the
+  method. For example, if we had a service that allowed uploading images:
+
+
+    media = MediaFileUpload('cow.png', mimetype='image/png',
+      chunksize=1024*1024, resumable=True)
+    farm.animals().insert(
+        id='cow',
+        name='cow.png',
+        media_body=media).execute()
+
+  Depending on the platform you are working on, you may pass -1 as the
+  chunksize, which indicates that the entire file should be uploaded in a single
+  request. If the underlying platform supports streams, such as Python 2.6 or
+  later, then this can be very efficient as it avoids multiple connections, and
+  also avoids loading the entire file into memory before sending it. Note that
+  Google App Engine has a 5MB limit on request size, so you should never set
+  your chunksize larger than 5MB, or to -1.
+  """
+
+  @util.positional(2)
+  def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE,
+               resumable=False):
+    """Constructor.
+
+    Args:
+      filename: string, Name of the file.
+      mimetype: string, Mime-type of the file. If None then a mime-type will be
+        guessed from the file extension.
+      chunksize: int, File will be uploaded in chunks of this many bytes. Only
+        used if resumable=True. Pass in a value of -1 if the file is to be
+        uploaded in a single chunk. Note that Google App Engine has a 5MB limit
+        on request size, so you should never set your chunksize larger than 5MB,
+        or to -1.
+      resumable: bool, True if this is a resumable upload. False means upload
+        in a single request.
+    """
+    self._filename = filename
+    fd = open(self._filename, 'rb')
+    if mimetype is None:
+      (mimetype, encoding) = mimetypes.guess_type(filename)
+    super(MediaFileUpload, self).__init__(fd, mimetype, chunksize=chunksize,
+                                          resumable=resumable)
+
+  def to_json(self):
+    """Creating a JSON representation of an instance of MediaFileUpload.
+
+    Returns:
+       string, a JSON representation of this instance, suitable to pass to
+       from_json().
+    """
+    return self._to_json(strip=['_fd'])
+
+  @staticmethod
+  def from_json(s):
+    d = simplejson.loads(s)
+    return MediaFileUpload(d['_filename'], mimetype=d['_mimetype'],
+                           chunksize=d['_chunksize'], resumable=d['_resumable'])
+
+
+class MediaInMemoryUpload(MediaIoBaseUpload):
+  """MediaUpload for a chunk of bytes.
+
+  DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
+  the stream.
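+
+  For example, a sketch of the replacement, assuming the payload bytes are
+  in 'body':
+
+    fh = StringIO.StringIO(body)
+    media = MediaIoBaseUpload(fh, mimetype='application/octet-stream')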
+  """
+
+  @util.positional(2)
+  def __init__(self, body, mimetype='application/octet-stream',
+               chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
+    """Create a new MediaInMemoryUpload.
+
+    DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO
+    for the stream.
+
+    Args:
+      body: string, Bytes of body content.
+      mimetype: string, Mime-type of the file or default of
+        'application/octet-stream'.
+      chunksize: int, File will be uploaded in chunks of this many bytes. Only
+        used if resumable=True.
+      resumable: bool, True if this is a resumable upload. False means upload
+        in a single request.
+    """
+    fd = StringIO.StringIO(body)
+    super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize,
+                                              resumable=resumable)
+
+
+class MediaIoBaseDownload(object):
+  """"Download media resources.
+
+  Note that the Python file object is compatible with io.Base and can be used
+  with this class also.
+
+
+  Example:
+    request = farms.animals().get_media(id='cow')
+    fh = io.FileIO('cow.png', mode='wb')
+    downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024)
+
+    done = False
+    while done is False:
+      status, done = downloader.next_chunk()
+      if status:
+        print "Download %d%%." % int(status.progress() * 100)
+    print "Download Complete!"
+  """
+
+  @util.positional(3)
+  def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
+    """Constructor.
+
+    Args:
+      fd: io.IOBase or file object, The stream in which to write the downloaded
+        bytes.
+      request: apiclient.http.HttpRequest, the media request to perform in
+        chunks.
+      chunksize: int, File will be downloaded in chunks of this many bytes.
+    """
+    self._fd = fd
+    self._request = request
+    self._uri = request.uri
+    self._chunksize = chunksize
+    self._progress = 0
+    self._total_size = None
+    self._done = False
+
+  def next_chunk(self):
+    """Get the next chunk of the download.
+
+    Returns:
+      (status, done): (MediaDownloadStatus, boolean)
+         The value of 'done' will be True when the media has been fully
+         downloaded.
+
+    Raises:
+      apiclient.errors.HttpError if the response was not a 2xx.
+      httplib2.HttpLib2Error if a transport error has occurred.
+    """
+    headers = {
+        'range': 'bytes=%d-%d' % (
+            self._progress, self._progress + self._chunksize)
+        }
+    http = self._request.http
+    http.follow_redirects = False
+
+    resp, content = http.request(self._uri, headers=headers)
+    if resp.status in [301, 302, 303, 307, 308] and 'location' in resp:
+      self._uri = resp['location']
+      resp, content = http.request(self._uri, headers=headers)
+    if resp.status in [200, 206]:
+      self._progress += len(content)
+      self._fd.write(content)
+
+      if 'content-range' in resp:
+        content_range = resp['content-range']
+        length = content_range.rsplit('/', 1)[1]
+        self._total_size = int(length)
+
+      if self._progress == self._total_size:
+        self._done = True
+      return MediaDownloadProgress(self._progress, self._total_size), self._done
+    else:
+      raise HttpError(resp, content, uri=self._uri)
+
+
+class _StreamSlice(object):
+  """Truncated stream.
+
+  Takes a stream and presents a stream that is a slice of the original stream.
+  This is used when uploading media in chunks. In later versions of Python a
+  stream can be passed to httplib in place of the string of data to send. The
+  problem is that httplib just blindly reads to the end of the stream. This
+  wrapper presents a virtual stream that only reads to the end of the chunk.
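+
+  For example, a minimal sketch over a 100-byte in-memory stream:
+
+    fh = io.BytesIO('x' * 100)
+    s = _StreamSlice(fh, 10, 20)
+    s.read()  # Returns bytes 10..29 of fh; further reads return ''.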
+  """
+
+  def __init__(self, stream, begin, chunksize):
+    """Constructor.
+
+    Args:
+      stream: (io.IOBase, file object), the stream to wrap.
+      begin: int, the seek position the chunk begins at.
+      chunksize: int, the size of the chunk.
+    """
+    self._stream = stream
+    self._begin = begin
+    self._chunksize = chunksize
+    self._stream.seek(begin)
+
+  def read(self, n=-1):
+    """Read n bytes.
+
+    Args:
+      n: int, the number of bytes to read.
+
+    Returns:
+      A string of length 'n', or less if EOF is reached.
+    """
+    # The data left available to read sits in [cur, end)
+    cur = self._stream.tell()
+    end = self._begin + self._chunksize
+    if n == -1 or cur + n > end:
+      n = end - cur
+    return self._stream.read(n)
+
+
+class HttpRequest(object):
+  """Encapsulates a single HTTP request."""
+
+  @util.positional(4)
+  def __init__(self, http, postproc, uri,
+               method='GET',
+               body=None,
+               headers=None,
+               methodId=None,
+               resumable=None):
+    """Constructor for an HttpRequest.
+
+    Args:
+      http: httplib2.Http, the transport object to use to make a request.
+      postproc: callable, called on the HTTP response and content to transform
+                it into a data object before returning, or raising an exception
+                on an error.
+      uri: string, the absolute URI to send the request to.
+      method: string, the HTTP method to use.
+      body: string, the request body of the HTTP request.
+      headers: dict, the HTTP request headers.
+      methodId: string, a unique identifier for the API method being called.
+      resumable: MediaUpload, None if this is not a resumable request.
+    """
+    self.uri = uri
+    self.method = method
+    self.body = body
+    self.headers = headers or {}
+    self.methodId = methodId
+    self.http = http
+    self.postproc = postproc
+    self.resumable = resumable
+    self.response_callbacks = []
+    self._in_error_state = False
+
+    # Pull the multipart boundary out of the content-type header.
+    major, minor, params = mimeparse.parse_mime_type(
+        headers.get('content-type', 'application/json'))
+
+    # The size of the non-media part of the request.
+    self.body_size = len(self.body or '')
+
+    # The resumable URI to send chunks to.
+    self.resumable_uri = None
+
+    # The bytes that have been uploaded.
+    self.resumable_progress = 0
+
+  @util.positional(1)
+  def execute(self, http=None):
+    """Execute the request.
+
+    Args:
+      http: httplib2.Http, an http object to be used in place of the
+            one the HttpRequest request object was constructed with.
+
+    Returns:
+      A deserialized object model of the response body as determined
+      by the postproc.
+
+    Raises:
+      apiclient.errors.HttpError if the response was not a 2xx.
+      httplib2.HttpLib2Error if a transport error has occurred.
+    """
+    if http is None:
+      http = self.http
+    if self.resumable:
+      body = None
+      while body is None:
+        _, body = self.next_chunk(http=http)
+      return body
+    else:
+      if 'content-length' not in self.headers:
+        self.headers['content-length'] = str(self.body_size)
+      # If the request URI is too long then turn it into a POST request.
+      if len(self.uri) > MAX_URI_LENGTH and self.method == 'GET':
+        self.method = 'POST'
+        self.headers['x-http-method-override'] = 'GET'
+        self.headers['content-type'] = 'application/x-www-form-urlencoded'
+        parsed = urlparse.urlparse(self.uri)
+        self.uri = urlparse.urlunparse(
+            (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None,
+             None)
+            )
+        self.body = parsed.query
+        self.headers['content-length'] = str(len(self.body))
+
+      resp, content = http.request(str(self.uri), method=str(self.method),
+                                   body=self.body, headers=self.headers)
+      for callback in self.response_callbacks:
+        callback(resp)
+      if resp.status >= 300:
+        raise HttpError(resp, content, uri=self.uri)
+    return self.postproc(resp, content)
+
+  @util.positional(2)
+  def add_response_callback(self, cb):
+    """add_response_headers_callback
+
+    Args:
+      cb: Callback to be called on receiving the response headers, of signature:
+
+      def cb(resp):
+        # Where resp is an instance of httplib2.Response
+    """
+    self.response_callbacks.append(cb)
+
+  @util.positional(1)
+  def next_chunk(self, http=None):
+    """Execute the next step of a resumable upload.
+
+    Can only be used if the method being executed supports media uploads and
+    the MediaUpload object passed in was flagged as using resumable upload.
+
+    Example:
+
+      media = MediaFileUpload('cow.png', mimetype='image/png',
+                              chunksize=1000, resumable=True)
+      request = farm.animals().insert(
+          id='cow',
+          name='cow.png',
+          media_body=media)
+
+      response = None
+      while response is None:
+        status, response = request.next_chunk()
+        if status:
+          print "Upload %d%% complete." % int(status.progress() * 100)
+
+
+    Returns:
+      (status, body): (ResumableMediaStatus, object)
+         The body will be None until the resumable media is fully uploaded.
+
+    Raises:
+      apiclient.errors.HttpError if the response was not a 2xx.
+      httplib2.HttpLib2Error if a transport error has occurred.
+    """
+    if http is None:
+      http = self.http
+
+    if self.resumable.size() is None:
+      size = '*'
+    else:
+      size = str(self.resumable.size())
+
+    if self.resumable_uri is None:
+      start_headers = copy.copy(self.headers)
+      start_headers['X-Upload-Content-Type'] = self.resumable.mimetype()
+      if size != '*':
+        start_headers['X-Upload-Content-Length'] = size
+      start_headers['content-length'] = str(self.body_size)
+
+      resp, content = http.request(self.uri, self.method,
+                                   body=self.body,
+                                   headers=start_headers)
+      if resp.status == 200 and 'location' in resp:
+        self.resumable_uri = resp['location']
+      else:
+        raise ResumableUploadError(resp, content)
+    elif self._in_error_state:
+      # If we are in an error state then query the server for current state of
+      # the upload by sending an empty PUT and reading the 'range' header in
+      # the response.
+      headers = {
+          'Content-Range': 'bytes */%s' % size,
+          'content-length': '0'
+          }
+      resp, content = http.request(self.resumable_uri, 'PUT',
+                                   headers=headers)
+      status, body = self._process_response(resp, content)
+      if body:
+        # The upload was complete.
+        return (status, body)
+
+    # The httplib.request method can take streams for the body parameter, but
+    # only in Python 2.6 or later. If a stream is available under those
+    # conditions then use it as the body argument.
+    if self.resumable.has_stream() and sys.version_info[1] >= 6:
+      data = self.resumable.stream()
+      if self.resumable.chunksize() == -1:
+        data.seek(self.resumable_progress)
+        chunk_end = self.resumable.size() - self.resumable_progress - 1
+      else:
+        # Doing chunking with a stream, so wrap a slice of the stream.
+        data = _StreamSlice(data, self.resumable_progress,
+                            self.resumable.chunksize())
+        chunk_end = min(
+            self.resumable_progress + self.resumable.chunksize() - 1,
+            self.resumable.size() - 1)
+    else:
+      data = self.resumable.getbytes(
+          self.resumable_progress, self.resumable.chunksize())
+
+      # A short read implies that we are at EOF, so finish the upload.
+      if len(data) < self.resumable.chunksize():
+        size = str(self.resumable_progress + len(data))
+
+      chunk_end = self.resumable_progress + len(data) - 1
+
+    headers = {
+        'Content-Range': 'bytes %d-%d/%s' % (
+            self.resumable_progress, chunk_end, size),
+        # Must set the content-length header here because httplib can't
+        # calculate the size when working with _StreamSlice.
+        'Content-Length': str(chunk_end - self.resumable_progress + 1)
+        }
+    try:
+      resp, content = http.request(self.resumable_uri, 'PUT',
+                                   body=data,
+                                   headers=headers)
+    except:
+      self._in_error_state = True
+      raise
+
+    return self._process_response(resp, content)
+
+  def _process_response(self, resp, content):
+    """Process the response from a single chunk upload.
+
+    Args:
+      resp: httplib2.Response, the response object.
+      content: string, the content of the response.
+
+    Returns:
+      (status, body): (ResumableMediaStatus, object)
+         The body will be None until the resumable media is fully uploaded.
+
+    Raises:
+      apiclient.errors.HttpError if the response was not a 2xx or a 308.
+    """
+    if resp.status in [200, 201]:
+      self._in_error_state = False
+      return None, self.postproc(resp, content)
+    elif resp.status == 308:
+      self._in_error_state = False
+      # A "308 Resume Incomplete" indicates we are not done.
+      self.resumable_progress = int(resp['range'].split('-')[1]) + 1
+      if 'location' in resp:
+        self.resumable_uri = resp['location']
+    else:
+      self._in_error_state = True
+      raise HttpError(resp, content, uri=self.uri)
+
+    return (MediaUploadProgress(self.resumable_progress, self.resumable.size()),
+            None)
+
+  def to_json(self):
+    """Returns a JSON representation of the HttpRequest."""
+    d = copy.copy(self.__dict__)
+    if d['resumable'] is not None:
+      d['resumable'] = self.resumable.to_json()
+    del d['http']
+    del d['postproc']
+
+    return simplejson.dumps(d)
+
+  @staticmethod
+  def from_json(s, http, postproc):
+    """Returns an HttpRequest populated with info from a JSON object."""
+    d = simplejson.loads(s)
+    if d['resumable'] is not None:
+      d['resumable'] = MediaUpload.new_from_json(d['resumable'])
+    return HttpRequest(
+        http,
+        postproc,
+        uri=d['uri'],
+        method=d['method'],
+        body=d['body'],
+        headers=d['headers'],
+        methodId=d['methodId'],
+        resumable=d['resumable'])
+
+
+class BatchHttpRequest(object):
+  """Batches multiple HttpRequest objects into a single HTTP request.
+
+  Example:
+    from apiclient.http import BatchHttpRequest
+
+    def list_animals(request_id, response, exception):
+      \"\"\"Do something with the animals list response.\"\"\"
+      if exception is not None:
+        # Do something with the exception.
+        pass
+      else:
+        # Do something with the response.
+        pass
+
+    def list_farmers(request_id, response, exception):
+      \"\"\"Do something with the farmers list response.\"\"\"
+      if exception is not None:
+        # Do something with the exception.
+        pass
+      else:
+        # Do something with the response.
+        pass
+
+    service = build('farm', 'v2')
+
+    batch = BatchHttpRequest()
+
+    batch.add(service.animals().list(), list_animals)
+    batch.add(service.farmers().list(), list_farmers)
+    batch.execute(http=http)
+  """
+
+  @util.positional(1)
+  def __init__(self, callback=None, batch_uri=None):
+    """Constructor for a BatchHttpRequest.
+
+    Args:
+      callback: callable, A callback to be called for each response, of the
+        form callback(id, response, exception). The first parameter is the
+        request id, and the second is the deserialized response object. The
+        third is an apiclient.errors.HttpError exception object if an HTTP error
+        occurred while processing the request, or None if no error occurred.
+      batch_uri: string, URI to send batch requests to.
+    """
+    if batch_uri is None:
+      batch_uri = 'https://www.googleapis.com/batch'
+    self._batch_uri = batch_uri
+
+    # Global callback to be called for each individual response in the batch.
+    self._callback = callback
+
+    # A map from id to request.
+    self._requests = {}
+
+    # A map from id to callback.
+    self._callbacks = {}
+
+    # List of request ids, in the order in which they were added.
+    self._order = []
+
+    # The last auto generated id.
+    self._last_auto_id = 0
+
+    # Unique ID on which to base the Content-ID headers.
+    self._base_id = None
+
+    # A map from request id to (httplib2.Response, content) response pairs
+    self._responses = {}
+
+    # A map of id(Credentials) that have been refreshed.
+    self._refreshed_credentials = {}
+
+  def _refresh_and_apply_credentials(self, request, http):
+    """Refresh the credentials and apply to the request.
+
+    Args:
+      request: HttpRequest, the request.
+      http: httplib2.Http, the global http object for the batch.
+    """
+    # Force the credentials to refresh, but only once per refresh_token. If
+    # the request has no http object of its own then refresh the credentials
+    # of the http object passed in via execute().
+    creds = None
+    if request.http is not None and hasattr(request.http.request,
+        'credentials'):
+      creds = request.http.request.credentials
+    elif http is not None and hasattr(http.request, 'credentials'):
+      creds = http.request.credentials
+    if creds is not None:
+      if id(creds) not in self._refreshed_credentials:
+        creds.refresh(http)
+        self._refreshed_credentials[id(creds)] = 1
+
+      # Only apply the credentials if we are using the http object passed in,
+      # otherwise apply() will get called during _serialize_request().
+      if request.http is None or not hasattr(request.http.request,
+          'credentials'):
+        creds.apply(request.headers)
+
+  def _id_to_header(self, id_):
+    """Convert an id to a Content-ID header value.
+
+    Args:
+      id_: string, identifier of individual request.
+
+    Returns:
+      A Content-ID header with the id_ encoded into it. A UUID is prepended to
+      the value because Content-ID headers are supposed to be universally
+      unique.
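+
+      For example, for id_ '1' the header value looks like the following
+      sketch (the UUID portion varies per batch):
+
+        <b9a6b4a9-3e16-4a8f-9ad8-7d7e3cbd1f9f+1>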
+    """
+    if self._base_id is None:
+      self._base_id = uuid.uuid4()
+
+    return '<%s+%s>' % (self._base_id, urllib.quote(id_))
+
+  def _header_to_id(self, header):
+    """Convert a Content-ID header value to an id.
+
+    Presumes the Content-ID header conforms to the format that _id_to_header()
+    returns.
+
+    Args:
+      header: string, Content-ID header value.
+
+    Returns:
+      The extracted id value.
+
+    Raises:
+      BatchError if the header is not in the expected format.
+    """
+    if header[0] != '<' or header[-1] != '>':
+      raise BatchError("Invalid value for Content-ID: %s" % header)
+    if '+' not in header:
+      raise BatchError("Invalid value for Content-ID: %s" % header)
+    base, id_ = header[1:-1].rsplit('+', 1)
+
+    return urllib.unquote(id_)
+
+  def _serialize_request(self, request):
+    """Convert an HttpRequest object into a string.
+
+    Args:
+      request: HttpRequest, the request to serialize.
+
+    Returns:
+      The request as a string in application/http format.
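+
+      For example, a GET request might serialize to something like this
+      sketch (the exact headers depend on the request):
+
+        GET /plus/v1/activities?alt=json HTTP/1.1
+        Content-Type: application/json
+        Host: www.googleapis.com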
+    """
+    # Construct status line
+    parsed = urlparse.urlparse(request.uri)
+    request_line = urlparse.urlunparse(
+        (None, None, parsed.path, parsed.params, parsed.query, None)
+        )
+    status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
+    major, minor = request.headers.get('content-type', 'application/json').split('/')
+    msg = MIMENonMultipart(major, minor)
+    headers = request.headers.copy()
+
+    if request.http is not None and hasattr(request.http.request,
+        'credentials'):
+      request.http.request.credentials.apply(headers)
+
+    # MIMENonMultipart adds its own Content-Type header.
+    if 'content-type' in headers:
+      del headers['content-type']
+
+    for key, value in headers.iteritems():
+      msg[key] = value
+    msg['Host'] = parsed.netloc
+    msg.set_unixfrom(None)
+
+    if request.body is not None:
+      msg.set_payload(request.body)
+      msg['content-length'] = str(len(request.body))
+
+    # Serialize the mime message.
+    fp = StringIO.StringIO()
+    # maxheaderlen=0 means don't line wrap headers.
+    g = Generator(fp, maxheaderlen=0)
+    g.flatten(msg, unixfrom=False)
+    body = fp.getvalue()
+
+    # Strip off the \n\n that the MIME lib tacks onto the end of the payload.
+    if request.body is None:
+      body = body[:-2]
+
+    return status_line.encode('utf-8') + body
+
+  def _deserialize_response(self, payload):
+    """Convert string into httplib2 response and content.
+
+    Args:
+      payload: string, headers and body as a string.
+
+    Returns:
+      A pair (resp, content), such as would be returned from httplib2.request.
+    """
+    # Strip off the status line
+    status_line, payload = payload.split('\n', 1)
+    protocol, status, reason = status_line.split(' ', 2)
+
+    # Parse the rest of the response
+    parser = FeedParser()
+    parser.feed(payload)
+    msg = parser.close()
+    msg['status'] = status
+
+    # Create httplib2.Response from the parsed headers.
+    resp = httplib2.Response(msg)
+    resp.reason = reason
+    resp.version = int(protocol.split('/', 1)[1].replace('.', ''))
+
+    content = payload.split('\r\n\r\n', 1)[1]
+
+    return resp, content
+
+  def _new_id(self):
+    """Create a new id.
+
+    Auto incrementing number that avoids conflicts with ids already used.
+
+    Returns:
+       string, a new unique id.
+    """
+    self._last_auto_id += 1
+    while str(self._last_auto_id) in self._requests:
+      self._last_auto_id += 1
+    return str(self._last_auto_id)
+
+  @util.positional(2)
+  def add(self, request, callback=None, request_id=None):
+    """Add a new request.
+
+    Every callback added will be paired with a unique id, the request_id. That
+    unique id will be passed back to the callback when the response comes back
+    from the server. The default behavior is to have the library generate its
+    own unique id. If the caller passes in a request_id then the caller must
+    ensure uniqueness for each request_id; if a duplicate is supplied, an
+    exception is raised. Callers should either supply all request_ids or never
+    supply a request id, to avoid such an error.
+
+    Args:
+      request: HttpRequest, Request to add to the batch.
+      callback: callable, A callback to be called for this response, of the
+        form callback(id, response, exception). The first parameter is the
+        request id, and the second is the deserialized response object. The
+        third is an apiclient.errors.HttpError exception object if an HTTP error
+        occurred while processing the request, or None if no errors occurred.
+      request_id: string, A unique id for the request. The id will be passed to
+        the callback with the response.
+
+    Returns:
+      None
+
+    Raises:
+      BatchError if a media request is added to a batch.
+      KeyError if the request_id is not unique.
+    """
+    if request_id is None:
+      request_id = self._new_id()
+    if request.resumable is not None:
+      raise BatchError("Media requests cannot be used in a batch request.")
+    if request_id in self._requests:
+      raise KeyError("A request with this ID already exists: %s" % request_id)
+    self._requests[request_id] = request
+    self._callbacks[request_id] = callback
+    self._order.append(request_id)
+
+  def _execute(self, http, order, requests):
+    """Serialize batch request, send to server, process response.
+
+    Args:
+      http: httplib2.Http, an http object to be used to make the request with.
+      order: list, list of request ids in the order they were added to the
+        batch.
+      requests: dict, a map of request ids to the HttpRequest objects to send.
+
+    Raises:
+      httplib2.HttpLib2Error if a transport error has occurred.
+      apiclient.errors.BatchError if the response is the wrong format.
+    """
+    message = MIMEMultipart('mixed')
+    # Message should not write out its own headers.
+    setattr(message, '_write_headers', lambda self: None)
+
+    # Add all the individual requests.
+    for request_id in order:
+      request = requests[request_id]
+
+      msg = MIMENonMultipart('application', 'http')
+      msg['Content-Transfer-Encoding'] = 'binary'
+      msg['Content-ID'] = self._id_to_header(request_id)
+
+      body = self._serialize_request(request)
+      msg.set_payload(body)
+      message.attach(msg)
+
+    body = message.as_string()
+
+    headers = {}
+    headers['content-type'] = ('multipart/mixed; '
+                               'boundary="%s"') % message.get_boundary()
+
+    resp, content = http.request(self._batch_uri, 'POST', body=body,
+                                 headers=headers)
+
+    if resp.status >= 300:
+      raise HttpError(resp, content, uri=self._batch_uri)
+
+    # Now break out the individual responses and store each one.
+    boundary, _ = content.split(None, 1)
+
+    # Prepend with a content-type header so FeedParser can handle it.
+    header = 'content-type: %s\r\n\r\n' % resp['content-type']
+    for_parser = header + content
+
+    parser = FeedParser()
+    parser.feed(for_parser)
+    mime_response = parser.close()
+
+    if not mime_response.is_multipart():
+      raise BatchError("Response not in multipart/mixed format.", resp=resp,
+                       content=content)
+
+    for part in mime_response.get_payload():
+      request_id = self._header_to_id(part['Content-ID'])
+      response, content = self._deserialize_response(part.get_payload())
+      self._responses[request_id] = (response, content)
+
+  @util.positional(1)
+  def execute(self, http=None):
+    """Execute all the requests as a single batched HTTP request.
+
+    Args:
+      http: httplib2.Http, an http object to be used in place of the one the
+        HttpRequest request object was constructed with. If one isn't supplied
+        then use a http object from the requests in this batch.
+
+    Returns:
+      None
+
+    Raises:
+      httplib2.HttpLib2Error if a transport error has occurred.
+      apiclient.errors.BatchError if the response is the wrong format.
+    """
+
+    # If http is not supplied use the first valid one given in the requests.
+    if http is None:
+      for request_id in self._order:
+        request = self._requests[request_id]
+        if request is not None:
+          http = request.http
+          break
+
+    if http is None:
+      raise ValueError("Missing a valid http object.")
+
+    self._execute(http, self._order, self._requests)
+
+    # Loop over all the requests and check for 401s. For each 401 request the
+    # credentials should be refreshed and then sent again in a separate batch.
+    redo_requests = {}
+    redo_order = []
+
+    for request_id in self._order:
+      resp, content = self._responses[request_id]
+      if resp['status'] == '401':
+        redo_order.append(request_id)
+        request = self._requests[request_id]
+        self._refresh_and_apply_credentials(request, http)
+        redo_requests[request_id] = request
+
+    if redo_requests:
+      self._execute(http, redo_order, redo_requests)
+
+    # Now process all the responses, passing either the deserialized response
+    # or an HttpError for a non-2xx response to the per-request and global
+    # callbacks.
+
+    for request_id in self._order:
+      resp, content = self._responses[request_id]
+
+      request = self._requests[request_id]
+      callback = self._callbacks[request_id]
+
+      response = None
+      exception = None
+      try:
+        if resp.status >= 300:
+          raise HttpError(resp, content, uri=request.uri)
+        response = request.postproc(resp, content)
+      except HttpError, e:
+        exception = e
+
+      if callback is not None:
+        callback(request_id, response, exception)
+      if self._callback is not None:
+        self._callback(request_id, response, exception)
+
+
+class HttpRequestMock(object):
+  """Mock of HttpRequest.
+
+  Do not construct directly, instead use RequestMockBuilder.
+  """
+
+  def __init__(self, resp, content, postproc):
+    """Constructor for HttpRequestMock
+
+    Args:
+      resp: httplib2.Response, the response to emulate coming from the request
+      content: string, the response body
+      postproc: callable, the post processing function usually supplied by
+                the model class. See model.JsonModel.response() as an example.
+    """
+    self.resp = resp
+    self.content = content
+    self.postproc = postproc
+    if resp is None:
+      self.resp = httplib2.Response({'status': 200, 'reason': 'OK'})
+    if 'reason' in self.resp:
+      self.resp.reason = self.resp['reason']
+
+  def execute(self, http=None):
+    """Execute the request.
+
+    Same behavior as HttpRequest.execute(), but the response is
+    mocked and not really from an HTTP request/response.
+    """
+    return self.postproc(self.resp, self.content)
+
+
+class RequestMockBuilder(object):
+  """A simple mock of HttpRequest
+
+    Pass in a dictionary to the constructor that maps request methodIds to
+    tuples of (httplib2.Response, content, opt_expected_body) that should be
+    returned when that method is called. None may also be passed in for the
+    httplib2.Response, in which case a 200 OK response will be generated.
+    If an opt_expected_body (str or dict) is provided, it will be compared to
+    the body and UnexpectedBodyError will be raised on inequality.
+
+    Example:
+      response = '{"data": {"id": "tag:google.c...'
+      requestBuilder = RequestMockBuilder(
+        {
+          'plus.activities.get': (None, response),
+        }
+      )
+      apiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder)
+
+    Methods that you do not supply a response for will return a
+    200 OK with an empty string as the response content or raise an exception
+    if check_unexpected is set to True. The methodId is taken from the rpcName
+    in the discovery document.
+
+    For more details see the project wiki.
+  """
+
+  def __init__(self, responses, check_unexpected=False):
+    """Constructor for RequestMockBuilder
+
+    The constructed object should be a callable object
+    that can replace the class HttpRequest.
+
+    responses - A dictionary that maps methodIds into tuples
+                of (httplib2.Response, content). The methodId
+                comes from the 'rpcName' field in the discovery
+                document.
+    check_unexpected - A boolean setting whether or not UnexpectedMethodError
+                       should be raised for a method with no supplied response.
+    """
+    self.responses = responses
+    self.check_unexpected = check_unexpected
+
+  def __call__(self, http, postproc, uri, method='GET', body=None,
+               headers=None, methodId=None, resumable=None):
+    """Implements the callable interface that discovery.build() expects
+    of requestBuilder, which is to build an object compatible with
+    HttpRequest.execute(). See that method for the description of the
+    parameters and the expected response.
+    """
+    if methodId in self.responses:
+      response = self.responses[methodId]
+      resp, content = response[:2]
+      if len(response) > 2:
+        # Test the body against the supplied expected_body.
+        expected_body = response[2]
+        if bool(expected_body) != bool(body):
+          # Not expecting a body and provided one
+          # or expecting a body and not provided one.
+          raise UnexpectedBodyError(expected_body, body)
+        if isinstance(expected_body, str):
+          expected_body = simplejson.loads(expected_body)
+        body = simplejson.loads(body)
+        if body != expected_body:
+          raise UnexpectedBodyError(expected_body, body)
+      return HttpRequestMock(resp, content, postproc)
+    elif self.check_unexpected:
+      raise UnexpectedMethodError(methodId=methodId)
+    else:
+      model = JsonModel(False)
+      return HttpRequestMock(None, '{}', model.response)
+
+
+class HttpMock(object):
+  """Mock of httplib2.Http"""
+
+  def __init__(self, filename=None, headers=None):
+    """
+    Args:
+      filename: string, absolute filename to read response from
+      headers: dict, header to return with response
+    """
+    if headers is None:
+      headers = {'status': '200 OK'}
+    if filename:
+      f = open(filename, 'r')
+      self.data = f.read()
+      f.close()
+    else:
+      self.data = None
+    self.response_headers = headers
+    self.headers = None
+    self.uri = None
+    self.method = None
+    self.body = None
+
+  def request(self, uri,
+              method='GET',
+              body=None,
+              headers=None,
+              redirections=1,
+              connection_type=None):
+    self.uri = uri
+    self.method = method
+    self.body = body
+    self.headers = headers
+    return httplib2.Response(self.response_headers), self.data
+
+
+class HttpMockSequence(object):
+  """Mock of httplib2.Http
+
+  Mocks a sequence of calls to request returning different responses for each
+  call. Create an instance initialized with the desired response headers
+  and content and then use as if an httplib2.Http instance.
+
+    http = HttpMockSequence([
+      ({'status': '401'}, ''),
+      ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'),
+      ({'status': '200'}, 'echo_request_headers'),
+      ])
+    resp, content = http.request("http://examples.com")
+
+  There are special values you can pass in for content to trigger
+  behaviors that are helpful in testing.
+
+  'echo_request_headers' means return the request headers in the response body
+  'echo_request_headers_as_json' means return the request headers as JSON in
+     the response body
+  'echo_request_body' means return the request body in the response body
+  'echo_request_uri' means return the request uri in the response body
+  """
+
+  def __init__(self, iterable):
+    """
+    Args:
+      iterable: iterable, a sequence of pairs of (headers, body)
+    """
+    self._iterable = iterable
+    self.follow_redirects = True
+
+  def request(self, uri,
+              method='GET',
+              body=None,
+              headers=None,
+              redirections=1,
+              connection_type=None):
+    resp, content = self._iterable.pop(0)
+    if content == 'echo_request_headers':
+      content = headers
+    elif content == 'echo_request_headers_as_json':
+      content = simplejson.dumps(headers)
+    elif content == 'echo_request_body':
+      if hasattr(body, 'read'):
+        content = body.read()
+      else:
+        content = body
+    elif content == 'echo_request_uri':
+      content = uri
+    return httplib2.Response(resp), content
+
+
+def set_user_agent(http, user_agent):
+  """Set the user-agent on every request.
+
+  Args:
+     http: An instance of httplib2.Http or something that acts like it.
+     user_agent: string, the value for the user-agent header.
+
+  Returns:
+     A modified instance of http that was passed in.
+
+  Example:
+
+    h = httplib2.Http()
+    h = set_user_agent(h, "my-app-name/6.0")
+
+  Most of the time the user-agent will be set while doing auth; this is for
+  the rare cases where you are accessing an unauthenticated endpoint.
+  """
+  request_orig = http.request
+
+  # The closure that will replace 'httplib2.Http.request'.
+  def new_request(uri, method='GET', body=None, headers=None,
+                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+                  connection_type=None):
+    """Modify the request headers to add the user-agent."""
+    if headers is None:
+      headers = {}
+    if 'user-agent' in headers:
+      headers['user-agent'] = user_agent + ' ' + headers['user-agent']
+    else:
+      headers['user-agent'] = user_agent
+    resp, content = request_orig(uri, method, body, headers,
+                        redirections, connection_type)
+    return resp, content
+
+  http.request = new_request
+  return http
+
+
+def tunnel_patch(http):
+  """Tunnel PATCH requests over POST.
+  Args:
+     http - An instance of httplib2.Http
+         or something that acts like it.
+
+  Returns:
+     A modified instance of http that was passed in.
+
+  Example:
+
+    h = httplib2.Http()
+    h = tunnel_patch(h)
+
+  Useful if you are running on a platform that doesn't support PATCH.
+  Apply this last if you are using OAuth 1.0, as changing the method
+  will result in a different signature.
+  """
+  request_orig = http.request
+
+  # The closure that will replace 'httplib2.Http.request'.
+  def new_request(uri, method='GET', body=None, headers=None,
+                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+                  connection_type=None):
+    """Modify the request headers to add the user-agent."""
+    if headers is None:
+      headers = {}
+    if method == 'PATCH':
+      if 'oauth_token' in headers.get('authorization', ''):
+        logging.warning(
+            'OAuth 1.0 request made with Credentials after tunnel_patch.')
+      headers['x-http-method-override'] = "PATCH"
+      method = 'POST'
+    resp, content = request_orig(uri, method, body, headers,
+                        redirections, connection_type)
+    return resp, content
+
+  http.request = new_request
+  return http
diff --git a/apiclient/mimeparse.py b/apiclient/mimeparse.py
new file mode 100644
index 0000000..cbb9d07
--- /dev/null
+++ b/apiclient/mimeparse.py
@@ -0,0 +1,172 @@
+# Copyright (C) 2007 Joe Gregorio
+#
+# Licensed under the MIT License
+
+"""MIME-Type Parser
+
+This module provides basic functions for handling mime-types. It can handle
+matching mime-types against a list of media-ranges. See section 14.1 of the
+HTTP specification [RFC 2616] for a complete explanation.
+
+   http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
+
+Contents:
+ - parse_mime_type():   Parses a mime-type into its component parts.
+ - parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q'
+                          quality parameter.
+ - quality():           Determines the quality ('q') of a mime-type when
+                          compared against a list of media-ranges.
+ - quality_parsed():    Just like quality() except the second parameter must be
+                          pre-parsed.
+ - best_match():        Choose the mime-type with the highest quality ('q')
+                          from a list of candidates.
+"""
+
+__version__ = '0.1.3'
+__author__ = 'Joe Gregorio'
+__email__ = 'joe@bitworking.org'
+__license__ = 'MIT License'
+__credits__ = ''
+
+
+def parse_mime_type(mime_type):
+    """Parses a mime-type into its component parts.
+
+    Carves up a mime-type and returns a tuple of the (type, subtype, params)
+    where 'params' is a dictionary of all the parameters for the media range.
+    For example, the media range 'application/xhtml;q=0.5' would get parsed
+    into:
+
+       ('application', 'xhtml', {'q': '0.5'})
+    """
+    parts = mime_type.split(';')
+    params = dict([tuple([s.strip() for s in param.split('=', 1)])\
+            for param in parts[1:]
+                  ])
+    full_type = parts[0].strip()
+    # Java URLConnection class sends an Accept header that includes a
+    # single '*'. Turn it into a legal wildcard.
+    if full_type == '*':
+        full_type = '*/*'
+    (type, subtype) = full_type.split('/')
+
+    return (type.strip(), subtype.strip(), params)
+
+
+def parse_media_range(range):
+    """Parse a media-range into its component parts.
+
+    Carves up a media range and returns a tuple of the (type, subtype,
+    params) where 'params' is a dictionary of all the parameters for the media
+    range.  For example, the media range 'application/*;q=0.5' would get parsed
+    into:
+
+       ('application', '*', {'q': '0.5'})
+
+    In addition this function also guarantees that there is a value for 'q'
+    in the params dictionary, filling it in with a proper default if
+    necessary.
+    """
+    (type, subtype, params) = parse_mime_type(range)
+    if not params.has_key('q') or not params['q'] or \
+            not float(params['q']) or float(params['q']) > 1\
+            or float(params['q']) < 0:
+        params['q'] = '1'
+
+    return (type, subtype, params)
+
+
+def fitness_and_quality_parsed(mime_type, parsed_ranges):
+    """Find the best match for a mime-type amongst parsed media-ranges.
+
+    Find the best match for a given mime-type against a list of media_ranges
+    that have already been parsed by parse_media_range(). Returns a tuple of
+    the fitness value and the value of the 'q' quality parameter of the best
+    match, or (-1, 0) if no match was found. Just as for quality_parsed(),
+    'parsed_ranges' must be a list of parsed media ranges.
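+
+    For example:
+
+    >>> fitness_and_quality_parsed('text/html',
+    ...         [parse_media_range('text/*;q=0.7')])
+    (100, 0.7)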
+    """
+    best_fitness = -1
+    best_fit_q = 0
+    (target_type, target_subtype, target_params) =\
+            parse_media_range(mime_type)
+    for (type, subtype, params) in parsed_ranges:
+        type_match = (type == target_type or\
+                      type == '*' or\
+                      target_type == '*')
+        subtype_match = (subtype == target_subtype or\
+                         subtype == '*' or\
+                         target_subtype == '*')
+        if type_match and subtype_match:
+            param_matches = reduce(lambda x, y: x + y, [1 for (key, value) in \
+                    target_params.iteritems() if key != 'q' and \
+                    params.has_key(key) and value == params[key]], 0)
+            fitness = (type == target_type) and 100 or 0
+            fitness += (subtype == target_subtype) and 10 or 0
+            fitness += param_matches
+            if fitness > best_fitness:
+                best_fitness = fitness
+                best_fit_q = params['q']
+
+    return best_fitness, float(best_fit_q)
+
+
+def quality_parsed(mime_type, parsed_ranges):
+    """Find the best match for a mime-type amongst parsed media-ranges.
+
+    Find the best match for a given mime-type against a list of media_ranges
+    that have already been parsed by parse_media_range(). Returns the 'q'
+    quality parameter of the best match, 0 if no match was found. This function
+    behaves the same as quality() except that 'parsed_ranges' must be a list of
+    parsed media ranges.
+    """
+
+    return fitness_and_quality_parsed(mime_type, parsed_ranges)[1]
+
+
+def quality(mime_type, ranges):
+    """Return the quality ('q') of a mime-type against a list of media-ranges.
+
+    Returns the quality 'q' of a mime-type when compared against the
+    media-ranges in ranges. For example:
+
+    >>> quality('text/html','text/*;q=0.3, text/html;q=0.7,
+                  text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5')
+    0.7
+
+    """
+    parsed_ranges = [parse_media_range(r) for r in ranges.split(',')]
+
+    return quality_parsed(mime_type, parsed_ranges)
+
+
+def best_match(supported, header):
+    """Return mime-type with the highest quality ('q') from list of candidates.
+
+    Takes a list of supported mime-types and finds the best match for all the
+    media-ranges listed in header. The value of header must be a string that
+    conforms to the format of the HTTP Accept: header. The value of 'supported'
+    is a list of mime-types. The list of supported mime-types should be sorted
+    in order of increasing desirability, in case of a situation where there is
+    a tie.
+
+    >>> best_match(['application/xbel+xml', 'text/xml'],
+                   'text/*;q=0.5,*/*; q=0.1')
+    'text/xml'
+    """
+    split_header = _filter_blank(header.split(','))
+    parsed_header = [parse_media_range(r) for r in split_header]
+    weighted_matches = []
+    pos = 0
+    for mime_type in supported:
+        weighted_matches.append((fitness_and_quality_parsed(mime_type,
+                                 parsed_header), pos, mime_type))
+        pos += 1
+    weighted_matches.sort()
+
+    return weighted_matches[-1][0][1] and weighted_matches[-1][2] or ''
+
+
+def _filter_blank(i):
+    for s in i:
+        if s.strip():
+            yield s
diff --git a/apiclient/model.py b/apiclient/model.py
new file mode 100644
index 0000000..12fcab6
--- /dev/null
+++ b/apiclient/model.py
@@ -0,0 +1,385 @@
+#!/usr/bin/python2.4
+#
+# Copyright (C) 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Model objects for requests and responses.
+
+Each API may support one or more serializations, such
+as JSON, Atom, etc. The model classes are responsible
+for converting between the wire format and the Python
+object representation.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import gflags
+import logging
+import urllib
+
+from errors import HttpError
+from oauth2client.anyjson import simplejson
+
+FLAGS = gflags.FLAGS
+
+gflags.DEFINE_boolean('dump_request_response', False,
+                      'Dump all http server requests and responses. '
+                     )
+
+
+def _abstract():
+  raise NotImplementedError('You need to override this function')
+
+
+class Model(object):
+  """Model base class.
+
+  All Model classes should implement this interface.
+  The Model serializes and de-serializes between a wire
+  format such as JSON and a Python object representation.
+  """
+
+  def request(self, headers, path_params, query_params, body_value):
+    """Updates outgoing requests with a serialized body.
+
+    Args:
+      headers: dict, request headers
+      path_params: dict, parameters that appear in the request path
+      query_params: dict, parameters that appear in the query
+      body_value: object, the request body as a Python object, which must be
+                  serializable.
+    Returns:
+      A tuple of (headers, path_params, query, body)
+
+      headers: dict, request headers
+      path_params: dict, parameters that appear in the request path
+      query: string, query part of the request URI
+      body: string, the body serialized in the desired wire format.
+    """
+    _abstract()
+
+  def response(self, resp, content):
+    """Convert the response wire format into a Python object.
+
+    Args:
+      resp: httplib2.Response, the HTTP response headers and status
+      content: string, the body of the HTTP response
+
+    Returns:
+      The body de-serialized as a Python object.
+
+    Raises:
+      apiclient.errors.HttpError if a non 2xx response is received.
+    """
+    _abstract()
+
+
+class BaseModel(Model):
+  """Base model class.
+
+  Subclasses should provide implementations for the "serialize" and
+  "deserialize" methods, as well as values for the following class attributes.
+
+  Attributes:
+    accept: The value to use for the HTTP Accept header.
+    content_type: The value to use for the HTTP Content-type header.
+    no_content_response: The value to return when deserializing a 204 "No
+        Content" response.
+    alt_param: The value to supply as the "alt" query parameter for requests.
+  """
+
+  accept = None
+  content_type = None
+  no_content_response = None
+  alt_param = None
+
+  def _log_request(self, headers, path_params, query, body):
+    """Logs debugging information about the request if requested."""
+    if FLAGS.dump_request_response:
+      logging.info('--request-start--')
+      logging.info('-headers-start-')
+      for h, v in headers.iteritems():
+        logging.info('%s: %s', h, v)
+      logging.info('-headers-end-')
+      logging.info('-path-parameters-start-')
+      for h, v in path_params.iteritems():
+        logging.info('%s: %s', h, v)
+      logging.info('-path-parameters-end-')
+      logging.info('body: %s', body)
+      logging.info('query: %s', query)
+      logging.info('--request-end--')
+
+  def request(self, headers, path_params, query_params, body_value):
+    """Updates outgoing requests with a serialized body.
+
+    Args:
+      headers: dict, request headers
+      path_params: dict, parameters that appear in the request path
+      query_params: dict, parameters that appear in the query
+      body_value: object, the request body as a Python object, which must be
+                  serializable by simplejson.
+    Returns:
+      A tuple of (headers, path_params, query, body)
+
+      headers: dict, request headers
+      path_params: dict, parameters that appear in the request path
+      query: string, query part of the request URI
+      body: string, the body serialized as JSON
+    """
+    query = self._build_query(query_params)
+    headers['accept'] = self.accept
+    headers['accept-encoding'] = 'gzip, deflate'
+    if 'user-agent' in headers:
+      headers['user-agent'] += ' '
+    else:
+      headers['user-agent'] = ''
+    headers['user-agent'] += 'google-api-python-client/1.0'
+
+    if body_value is not None:
+      headers['content-type'] = self.content_type
+      body_value = self.serialize(body_value)
+    self._log_request(headers, path_params, query, body_value)
+    return (headers, path_params, query, body_value)
+
+  def _build_query(self, params):
+    """Builds a query string.
+
+    Args:
+      params: dict, the query parameters
+
+    Returns:
+      The query parameters properly encoded into an HTTP URI query string.
+    """
+    if self.alt_param is not None:
+      params.update({'alt': self.alt_param})
+    astuples = []
+    for key, value in params.iteritems():
+      if type(value) == type([]):
+        for x in value:
+          x = x.encode('utf-8')
+          astuples.append((key, x))
+      else:
+        if getattr(value, 'encode', False) and callable(value.encode):
+          value = value.encode('utf-8')
+        astuples.append((key, value))
+    return '?' + urllib.urlencode(astuples)
+
+  def _log_response(self, resp, content):
+    """Logs debugging information about the response if requested."""
+    if FLAGS.dump_request_response:
+      logging.info('--response-start--')
+      for h, v in resp.iteritems():
+        logging.info('%s: %s', h, v)
+      if content:
+        logging.info(content)
+      logging.info('--response-end--')
+
+  def response(self, resp, content):
+    """Convert the response wire format into a Python object.
+
+    Args:
+      resp: httplib2.Response, the HTTP response headers and status
+      content: string, the body of the HTTP response
+
+    Returns:
+      The body de-serialized as a Python object.
+
+    Raises:
+      apiclient.errors.HttpError if a non 2xx response is received.
+    """
+    self._log_response(resp, content)
+    # Error handling is TBD, for example, do we retry
+    # for some operation/error combinations?
+    if resp.status < 300:
+      if resp.status == 204:
+        # A 204: No Content response should be treated differently
+        # from all the other success states.
+        return self.no_content_response
+      return self.deserialize(content)
+    else:
+      logging.debug('Content from bad request was: %s' % content)
+      raise HttpError(resp, content)
+
+  def serialize(self, body_value):
+    """Perform the actual Python object serialization.
+
+    Args:
+      body_value: object, the request body as a Python object.
+
+    Returns:
+      string, the body in serialized form.
+    """
+    _abstract()
+
+  def deserialize(self, content):
+    """Perform the actual deserialization from response string to Python
+    object.
+
+    Args:
+      content: string, the body of the HTTP response
+
+    Returns:
+      The body de-serialized as a Python object.
+    """
+    _abstract()
+
+
+class JsonModel(BaseModel):
+  """Model class for JSON.
+
+  Serializes and de-serializes between JSON and the Python
+  object representation of HTTP request and response bodies.
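+
+  For example, a sketch of the optional data wrapper behavior:
+
+    model = JsonModel(data_wrapper=True)
+    model.serialize({'title': 'cow'})         # '{"data": {"title": "cow"}}'
+    model.deserialize('{"data": {"id": 1}}')  # {u'id': 1}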
+  """
+  accept = 'application/json'
+  content_type = 'application/json'
+  alt_param = 'json'
+
+  def __init__(self, data_wrapper=False):
+    """Construct a JsonModel.
+
+    Args:
+      data_wrapper: boolean, wrap requests and responses in a data wrapper
+    """
+    self._data_wrapper = data_wrapper
+
+  def serialize(self, body_value):
+    if (isinstance(body_value, dict) and 'data' not in body_value and
+        self._data_wrapper):
+      body_value = {'data': body_value}
+    return simplejson.dumps(body_value)
+
+  def deserialize(self, content):
+    body = simplejson.loads(content)
+    if self._data_wrapper and isinstance(body, dict) and 'data' in body:
+      body = body['data']
+    return body
+
+  @property
+  def no_content_response(self):
+    return {}
+
+
+class RawModel(JsonModel):
+  """Model class for requests that don't return JSON.
+
+  Serializes and de-serializes between JSON and the Python
+  object representation of HTTP request, and returns the raw bytes
+  of the response body.
+  """
+  accept = '*/*'
+  content_type = 'application/json'
+  alt_param = None
+
+  def deserialize(self, content):
+    return content
+
+  @property
+  def no_content_response(self):
+    return ''
+
+
+class MediaModel(JsonModel):
+  """Model class for requests that return Media.
+
+  Serializes and de-serializes between JSON and the Python
+  object representation of HTTP request, and returns the raw bytes
+  of the response body.
+  """
+  accept = '*/*'
+  content_type = 'application/json'
+  alt_param = 'media'
+
+  def deserialize(self, content):
+    return content
+
+  @property
+  def no_content_response(self):
+    return ''
+
+
+class ProtocolBufferModel(BaseModel):
+  """Model class for protocol buffers.
+
+  Serializes and de-serializes the binary protocol buffer sent in the HTTP
+  request and response bodies.
+  """
+  accept = 'application/x-protobuf'
+  content_type = 'application/x-protobuf'
+  alt_param = 'proto'
+
+  def __init__(self, protocol_buffer):
+    """Constructs a ProtocolBufferModel.
+
+    The serialized protocol buffer returned in an HTTP response will be
+    de-serialized using the given protocol buffer class.
+
+    Args:
+      protocol_buffer: The protocol buffer class used to de-serialize a
+        response from the API.
+    """
+    self._protocol_buffer = protocol_buffer
+
+  def serialize(self, body_value):
+    return body_value.SerializeToString()
+
+  def deserialize(self, content):
+    return self._protocol_buffer.FromString(content)
+
+  @property
+  def no_content_response(self):
+    return self._protocol_buffer()
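+
+  # Illustrative sketch (not part of the original source), assuming a
+  # hypothetical generated protocol buffer class MyMessage:
+  #
+  #   model = ProtocolBufferModel(MyMessage)
+  #   wire = model.serialize(message)    # message is a MyMessage instance
+  #   parsed = model.deserialize(wire)   # -> a new MyMessage instance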
+
+
+def makepatch(original, modified):
+  """Create a patch object.
+
+  Some methods support PATCH, an efficient way to send updates to a resource.
+  This method allows the easy construction of patch bodies by looking at the
+  differences between a resource before and after it was modified.
+
+  Args:
+    original: object, the original deserialized resource
+    modified: object, the modified deserialized resource
+  Returns:
+    An object that contains only the changes from original to modified, in a
+    form suitable to pass to a PATCH method.
+
+  Example usage:
+    item = service.activities().get(postid=postid, userid=userid).execute()
+    original = copy.deepcopy(item)
+    item['object']['content'] = 'This is updated.'
+    service.activities().patch(postid=postid, userid=userid,
+      body=makepatch(original, item)).execute()
+  """
+  patch = {}
+  for key, original_value in original.iteritems():
+    modified_value = modified.get(key, None)
+    if modified_value is None:
+      # Use None to signal that the element is deleted
+      patch[key] = None
+    elif original_value != modified_value:
+      if type(original_value) == type({}):
+        # Recursively descend objects
+        patch[key] = makepatch(original_value, modified_value)
+      else:
+        # In the case of simple types or arrays we just replace
+        patch[key] = modified_value
+    else:
+      # Don't add anything to patch if there's no change
+      pass
+  for key in modified:
+    if key not in original:
+      patch[key] = modified[key]
+
+  return patch
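+
+# Illustrative sketch (not part of the original source): unchanged keys are
+# omitted, deleted keys map to None, and new keys are carried over:
+#
+#   original = {'title': 'a', 'tags': ['x'], 'etag': '1'}
+#   modified = {'title': 'b', 'tags': ['x'], 'extra': True}
+#   makepatch(original, modified)
+#   # -> {'title': 'b', 'etag': None, 'extra': True}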
diff --git a/apiclient/oauth.py b/apiclient/oauth.py
new file mode 100644
index 0000000..136adcd
--- /dev/null
+++ b/apiclient/oauth.py
@@ -0,0 +1,443 @@
+# Copyright (C) 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for OAuth.
+
+Utilities for making it easier to work with OAuth.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+
+import copy
+import httplib2
+import logging
+import oauth2 as oauth
+import urllib
+import urlparse
+
+from oauth2client.anyjson import simplejson
+from oauth2client.client import Credentials
+from oauth2client.client import Flow
+from oauth2client.client import Storage
+
+try:
+  from urlparse import parse_qsl
+except ImportError:
+  from cgi import parse_qsl
+
+
+class Error(Exception):
+  """Base error for this module."""
+  pass
+
+
+class RequestError(Error):
+  """Error occurred during request."""
+  pass
+
+
+class MissingParameter(Error):
+  pass
+
+
+class CredentialsInvalidError(Error):
+  pass
+
+
+def _abstract():
+  raise NotImplementedError('You need to override this function')
+
+
+def _oauth_uri(name, discovery, params):
+  """Look up the OAuth URI from the discovery
+  document and add query parameters based on
+  params.
+
+  name      - The name of the OAuth URI to lookup, one
+              of 'request', 'access', or 'authorize'.
+  discovery - Portion of discovery document that describes
+              the OAuth endpoints.
+  params    - Dictionary that is used to form the query parameters
+              for the specified URI.
+  """
+  if name not in ['request', 'access', 'authorize']:
+    raise KeyError(name)
+  keys = discovery[name]['parameters'].keys()
+  query = {}
+  for key in keys:
+    if key in params:
+      query[key] = params[key]
+  return discovery[name]['url'] + '?' + urllib.urlencode(query)
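+
+# Illustrative sketch (not part of the original source), assuming a discovery
+# fragment like:
+#   {'request': {'url': 'https://example.com/request',
+#                'parameters': {'scope': {}}}}
+# then only the parameters named in the discovery fragment survive:
+#
+#   _oauth_uri('request', discovery, {'scope': 's', 'ignored': 'x'})
+#   # -> 'https://example.com/request?scope=s'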
+
+
+class OAuthCredentials(Credentials):
+  """Credentials object for OAuth 1.0a
+  """
+
+  def __init__(self, consumer, token, user_agent):
+    """
+    consumer   - An instance of oauth.Consumer.
+    token      - An instance of oauth.Token constructed with
+                 the access token and secret.
+    user_agent - The HTTP User-Agent to provide for this application.
+    """
+    self.consumer = consumer
+    self.token = token
+    self.user_agent = user_agent
+    self.store = None
+
+    # True if the credentials have been revoked
+    self._invalid = False
+
+  @property
+  def invalid(self):
+    """True if the credentials are invalid, such as being revoked."""
+    return getattr(self, "_invalid", False)
+
+  def set_store(self, store):
+    """Set the storage for the credential.
+
+    Args:
+      store: callable, a callable that when passed a Credential
+        will store the credential back to where it came from.
+        This is needed to store the latest access_token if it
+        has been revoked.
+    """
+    self.store = store
+
+  def __getstate__(self):
+    """Trim the state down to something that can be pickled."""
+    d = copy.copy(self.__dict__)
+    del d['store']
+    return d
+
+  def __setstate__(self, state):
+    """Reconstitute the state of the object from being pickled."""
+    self.__dict__.update(state)
+    self.store = None
+
+  def authorize(self, http):
+    """Authorize an httplib2.Http instance with these Credentials
+
+    Args:
+       http - An instance of httplib2.Http
+           or something that acts like it.
+
+    Returns:
+       A modified instance of http that was passed in.
+
+    Example:
+
+      h = httplib2.Http()
+      h = credentials.authorize(h)
+
+    You can't create a new OAuth
+    subclass of httplib2.Authentication because
+    it never gets passed the absolute URI, which is
+    needed for signing. So instead we have to overload
+    'request' with a closure that adds in the
+    Authorization header and then calls the original version
+    of 'request()'.
+    """
+    request_orig = http.request
+    signer = oauth.SignatureMethod_HMAC_SHA1()
+
+    # The closure that will replace 'httplib2.Http.request'.
+    def new_request(uri, method='GET', body=None, headers=None,
+                    redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+                    connection_type=None):
+      """Modify the request headers to add the appropriate
+      Authorization header."""
+      response_code = 302
+      http.follow_redirects = False
+      while response_code in [301, 302]:
+        req = oauth.Request.from_consumer_and_token(
+            self.consumer, self.token, http_method=method, http_url=uri)
+        req.sign_request(signer, self.consumer, self.token)
+        if headers is None:
+          headers = {}
+        headers.update(req.to_header())
+        if 'user-agent' in headers:
+          headers['user-agent'] = self.user_agent + ' ' + headers['user-agent']
+        else:
+          headers['user-agent'] = self.user_agent
+
+        resp, content = request_orig(uri, method, body, headers,
+                            redirections, connection_type)
+        response_code = resp.status
+        if response_code in [301, 302]:
+          uri = resp['location']
+
+      # Update the stored credential if it becomes invalid.
+      if response_code == 401:
+        logging.info('Access token no longer valid: %s' % content)
+        self._invalid = True
+        if self.store is not None:
+          self.store(self)
+        raise CredentialsInvalidError("Credentials are no longer valid.")
+
+      return resp, content
+
+    http.request = new_request
+    return http
+
+
+class TwoLeggedOAuthCredentials(Credentials):
+  """Two Legged Credentials object for OAuth 1.0a.
+
+  The Two Legged object is created directly, not from a flow.  Once you
+  authorize an httplib2.Http instance you can change the requestor and that
+  change will propagate to the authorized httplib2.Http instance. For example:
+
+    http = httplib2.Http()
+    http = credentials.authorize(http)
+
+    credentials.requestor = 'foo@example.info'
+    http.request(...)
+    credentials.requestor = 'bar@example.info'
+    http.request(...)
+  """
+
+  def __init__(self, consumer_key, consumer_secret, user_agent):
+    """
+    Args:
+      consumer_key: string, An OAuth 1.0 consumer key
+      consumer_secret: string, An OAuth 1.0 consumer secret
+      user_agent: string, The HTTP User-Agent to provide for this application.
+    """
+    self.consumer = oauth.Consumer(consumer_key, consumer_secret)
+    self.user_agent = user_agent
+    self.store = None
+
+    # email address of the user to act on the behalf of.
+    self._requestor = None
+
+  @property
+  def invalid(self):
+    """True if the credentials are invalid, such as being revoked.
+
+    Always returns False for Two Legged Credentials.
+    """
+    return False
+
+  def getrequestor(self):
+    return self._requestor
+
+  def setrequestor(self, email):
+    self._requestor = email
+
+  requestor = property(getrequestor, setrequestor, None,
+      'The email address of the user to act on behalf of')
+
+  def set_store(self, store):
+    """Set the storage for the credential.
+
+    Args:
+      store: callable, a callable that when passed a Credential
+        will store the credential back to where it came from.
+        This is needed to store the latest access_token if it
+        has been revoked.
+    """
+    self.store = store
+
+  def __getstate__(self):
+    """Trim the state down to something that can be pickled."""
+    d = copy.copy(self.__dict__)
+    del d['store']
+    return d
+
+  def __setstate__(self, state):
+    """Reconstitute the state of the object from being pickled."""
+    self.__dict__.update(state)
+    self.store = None
+
+  def authorize(self, http):
+    """Authorize an httplib2.Http instance with these Credentials
+
+    Args:
+       http - An instance of httplib2.Http
+           or something that acts like it.
+
+    Returns:
+       A modified instance of http that was passed in.
+
+    Example:
+
+      h = httplib2.Http()
+      h = credentials.authorize(h)
+
+    You can't create a new OAuth
+    subclass of httplib2.Authentication because
+    it never gets passed the absolute URI, which is
+    needed for signing. So instead we have to overload
+    'request' with a closure that adds in the
+    Authorization header and then calls the original version
+    of 'request()'.
+    """
+    request_orig = http.request
+    signer = oauth.SignatureMethod_HMAC_SHA1()
+
+    # The closure that will replace 'httplib2.Http.request'.
+    def new_request(uri, method='GET', body=None, headers=None,
+                    redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+                    connection_type=None):
+      """Modify the request headers to add the appropriate
+      Authorization header."""
+      response_code = 302
+      http.follow_redirects = False
+      while response_code in [301, 302]:
+        # add in xoauth_requestor_id=self._requestor to the uri
+        if self._requestor is None:
+          raise MissingParameter(
+              'Requestor must be set before using TwoLeggedOAuthCredentials')
+        parsed = list(urlparse.urlparse(uri))
+        q = parse_qsl(parsed[4])
+        q.append(('xoauth_requestor_id', self._requestor))
+        parsed[4] = urllib.urlencode(q)
+        uri = urlparse.urlunparse(parsed)
+
+        req = oauth.Request.from_consumer_and_token(
+            self.consumer, None, http_method=method, http_url=uri)
+        req.sign_request(signer, self.consumer, None)
+        if headers is None:
+          headers = {}
+        headers.update(req.to_header())
+        if 'user-agent' in headers:
+          headers['user-agent'] = self.user_agent + ' ' + headers['user-agent']
+        else:
+          headers['user-agent'] = self.user_agent
+        resp, content = request_orig(uri, method, body, headers,
+                            redirections, connection_type)
+        response_code = resp.status
+        if response_code in [301, 302]:
+          uri = resp['location']
+
+      if response_code == 401:
+        logging.info('Access token no longer valid: %s' % content)
+        # Do not store the invalid state of the Credentials because
+        # being 2LO they could be reinstated in the future.
+        raise CredentialsInvalidError("Credentials are invalid.")
+
+      return resp, content
+
+    http.request = new_request
+    return http
+
+
+class FlowThreeLegged(Flow):
+  """Does the Three Legged Dance for OAuth 1.0a.
+  """
+
+  def __init__(self, discovery, consumer_key, consumer_secret, user_agent,
+               **kwargs):
+    """
+    discovery       - Section of the API discovery document that describes
+                      the OAuth endpoints.
+    consumer_key    - OAuth consumer key
+    consumer_secret - OAuth consumer secret
+    user_agent      - The HTTP User-Agent that identifies the application.
+    **kwargs        - Keyword arguments that supply the optional and required
+                      parameters for the OAuth calls.
+    """
+    self.discovery = discovery
+    self.consumer_key = consumer_key
+    self.consumer_secret = consumer_secret
+    self.user_agent = user_agent
+    self.params = kwargs
+    self.request_token = {}
+    required = {}
+    for uriinfo in discovery.itervalues():
+      for name, value in uriinfo['parameters'].iteritems():
+        if value['required'] and not name.startswith('oauth_'):
+          required[name] = 1
+    for key in required.iterkeys():
+      if key not in self.params:
+        raise MissingParameter('Required parameter %s not supplied' % key)
+
+  def step1_get_authorize_url(self, oauth_callback='oob'):
+    """Returns a URI to redirect to the provider.
+
+    oauth_callback - Either the string 'oob' for a non-web-based application,
+                     or a URI that handles the callback from the authorization
+                     server.
+
+    If oauth_callback is 'oob' then pass in the
+    generated verification code to step2_exchange,
+    otherwise pass in the query parameters received
+    at the callback uri to step2_exchange.
+    """
+    consumer = oauth.Consumer(self.consumer_key, self.consumer_secret)
+    client = oauth.Client(consumer)
+
+    headers = {
+        'user-agent': self.user_agent,
+        'content-type': 'application/x-www-form-urlencoded'
+    }
+    body = urllib.urlencode({'oauth_callback': oauth_callback})
+    uri = _oauth_uri('request', self.discovery, self.params)
+
+    resp, content = client.request(uri, 'POST', headers=headers,
+                                   body=body)
+    if resp['status'] != '200':
+      logging.error('Failed to retrieve temporary authorization: %s', content)
+      raise RequestError('Invalid response %s.' % resp['status'])
+
+    self.request_token = dict(parse_qsl(content))
+
+    auth_params = copy.copy(self.params)
+    auth_params['oauth_token'] = self.request_token['oauth_token']
+
+    return _oauth_uri('authorize', self.discovery, auth_params)
+
+  def step2_exchange(self, verifier):
+    """Exchanges an authorized request token
+    for OAuthCredentials.
+
+    Args:
+      verifier: string, dict - either the verifier token, or a dictionary
+        of the query parameters to the callback, which contains
+        the oauth_verifier.
+    Returns:
+       The Credentials object.
+    """
+
+    if not (isinstance(verifier, str) or isinstance(verifier, unicode)):
+      verifier = verifier['oauth_verifier']
+
+    token = oauth.Token(
+        self.request_token['oauth_token'],
+        self.request_token['oauth_token_secret'])
+    token.set_verifier(verifier)
+    consumer = oauth.Consumer(self.consumer_key, self.consumer_secret)
+    client = oauth.Client(consumer, token)
+
+    headers = {
+        'user-agent': self.user_agent,
+        'content-type': 'application/x-www-form-urlencoded'
+    }
+
+    uri = _oauth_uri('access', self.discovery, self.params)
+    resp, content = client.request(uri, 'POST', headers=headers)
+    if resp['status'] != '200':
+      logging.error('Failed to retrieve access token: %s', content)
+      raise RequestError('Invalid response %s.' % resp['status'])
+
+    oauth_params = dict(parse_qsl(content))
+    token = oauth.Token(
+        oauth_params['oauth_token'],
+        oauth_params['oauth_token_secret'])
+
+    return OAuthCredentials(consumer, token, self.user_agent)
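+
+# Illustrative sketch of the full dance (not part of the original source);
+# 'discovery' stands for the OAuth section of an API discovery document:
+#
+#   flow = FlowThreeLegged(discovery, 'consumer_key', 'consumer_secret',
+#                          'my-app/1.0')
+#   authorize_url = flow.step1_get_authorize_url()
+#   # ... redirect the user to authorize_url and collect the verifier ...
+#   credentials = flow.step2_exchange(verifier)
+#   http = credentials.authorize(httplib2.Http())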
diff --git a/apiclient/push.py b/apiclient/push.py
new file mode 100644
index 0000000..c520faf
--- /dev/null
+++ b/apiclient/push.py
@@ -0,0 +1,274 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Push notifications support.
+
+This code is based on experimental APIs and is subject to change.
+"""
+
+__author__ = 'afshar@google.com (Ali Afshar)'
+
+import binascii
+import collections
+import os
+import urllib
+
+SUBSCRIBE = 'X-GOOG-SUBSCRIBE'
+SUBSCRIPTION_ID = 'X-GOOG-SUBSCRIPTION-ID'
+TOPIC_ID = 'X-GOOG-TOPIC-ID'
+TOPIC_URI = 'X-GOOG-TOPIC-URI'
+CLIENT_TOKEN = 'X-GOOG-CLIENT-TOKEN'
+EVENT_TYPE = 'X-GOOG-EVENT-TYPE'
+UNSUBSCRIBE = 'X-GOOG-UNSUBSCRIBE'
+
+
+class InvalidSubscriptionRequestError(ValueError):
+  """The request cannot be subscribed."""
+
+
+def new_token():
+  """Gets a random token for use as a client_token in push notifications.
+
+  Returns:
+    str, a new random token.
+  """
+  return binascii.hexlify(os.urandom(32))
+
+
+class Channel(object):
+  """Base class for channel types."""
+
+  def __init__(self, channel_type, channel_args):
+    """Create a new Channel.
+
+    You probably won't need to create a Channel manually, since each specific
+    channel type has its own Channel subclass with a more customized set of
+    arguments to pass. However, you may still create one directly here.
+
+    Args:
+      channel_type: str, the type of channel.
+      channel_args: dict, arguments to pass to the channel.
+    """
+    self.channel_type = channel_type
+    self.channel_args = channel_args
+
+  def as_header_value(self):
+    """Create the appropriate header for this channel.
+
+    Returns:
+      str encoded channel description suitable for use as a header.
+    """
+    return '%s?%s' % (self.channel_type, urllib.urlencode(self.channel_args))
+
+  def write_header(self, headers):
+    """Write the appropriate subscribe header to a headers dict.
+
+    Args:
+      headers: dict, headers to add subscribe header to.
+    """
+    headers[SUBSCRIBE] = self.as_header_value()
+
+
+class WebhookChannel(Channel):
+  """Channel for registering web hook notifications."""
+
+  def __init__(self, url, app_engine=False):
+    """Create a new WebhookChannel
+
+    Args:
+      url: str, URL to post notifications to.
+      app_engine: bool, default=False, whether the destination for the
+        notifications is an App Engine application.
+    """
+    super(WebhookChannel, self).__init__(
+        channel_type='web_hook',
+        channel_args={
+            'url': url,
+            'app_engine': app_engine and 'true' or 'false',
+        }
+    )
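+
+  # Illustrative sketch (not part of the original source):
+  #
+  #   channel = WebhookChannel('https://example.com/notify')
+  #   channel.as_header_value()
+  #   # -> 'web_hook?url=https%3A%2F%2Fexample.com%2Fnotify&app_engine=false'
+  #   (parameter order follows dict iteration order, so it may vary)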
+
+
+class Headers(collections.defaultdict):
+  """Headers for managing subscriptions."""
+
+  ALL_HEADERS = set([SUBSCRIBE, SUBSCRIPTION_ID, TOPIC_ID, TOPIC_URI,
+                     CLIENT_TOKEN, EVENT_TYPE, UNSUBSCRIBE])
+
+  def __init__(self):
+    """Create a new subscription configuration instance."""
+    collections.defaultdict.__init__(self, str)
+
+  def __setitem__(self, key, value):
+    """Set a header value, ensuring the key is an allowed value.
+
+    Args:
+      key: str, the header key.
+      value: str, the header value.
+    Raises:
+      ValueError if key is not one of the accepted headers.
+    """
+    normal_key = self._normalize_key(key)
+    if normal_key not in self.ALL_HEADERS:
+      raise ValueError('Header name must be one of %s.' % self.ALL_HEADERS)
+    else:
+      return collections.defaultdict.__setitem__(self, normal_key, value)
+
+  def __getitem__(self, key):
+    """Get a header value, normalizing the key case.
+
+    Args:
+      key: str, the header key.
+    Returns:
+      String header value.
+    Raises:
+      ValueError if the key is not one of the accepted headers.
+    """
+    normal_key = self._normalize_key(key)
+    if normal_key not in self.ALL_HEADERS:
+      raise ValueError('Header name must be one of %s.' % self.ALL_HEADERS)
+    else:
+      return collections.defaultdict.__getitem__(self, normal_key)
+
+  def _normalize_key(self, key):
+    """Normalize a header name for use as a key."""
+    return key.upper()
+
+  def items(self):
+    """Generator for each header."""
+    for header in self.ALL_HEADERS:
+      value = self[header]
+      if value:
+        yield header, value
+
+  def write(self, headers):
+    """Applies the subscription headers.
+
+    Args:
+      headers: dict of headers to insert values into.
+    """
+    for header, value in self.items():
+      headers[header.lower()] = value
+
+  def read(self, headers):
+    """Read from headers.
+
+    Args:
+      headers: dict of headers to read from.
+    """
+    for header in self.ALL_HEADERS:
+      if header.lower() in headers:
+        self[header] = headers[header.lower()]
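+
+  # Illustrative sketch (not part of the original source): keys are
+  # normalized to the canonical upper-case names on both get and set:
+  #
+  #   h = Headers()
+  #   h['x-goog-client-token'] = 'abc'
+  #   h[CLIENT_TOKEN]   # -> 'abc'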
+
+
+class Subscription(object):
+  """Information about a subscription."""
+
+  def __init__(self):
+    """Create a new Subscription."""
+    self.headers = Headers()
+
+  @classmethod
+  def for_request(cls, request, channel, client_token=None):
+    """Creates a subscription and attaches it to a request.
+
+    Args:
+      request: An http.HttpRequest to modify for making a subscription.
+      channel: A apiclient.push.Channel describing the subscription to
+               create.
+      client_token: (optional) client token to verify the notification.
+
+    Returns:
+      New subscription object.
+    """
+    subscription = cls.for_channel(channel=channel, client_token=client_token)
+    subscription.headers.write(request.headers)
+    if request.method != 'GET':
+      raise InvalidSubscriptionRequestError(
+          'Can only subscribe to requests which are GET.')
+    request.method = 'POST'
+
+    def _on_response(response, subscription=subscription):
+      """Called with the response headers. Reads the subscription headers."""
+      subscription.headers.read(response)
+
+    request.add_response_callback(_on_response)
+    return subscription
+
+  @classmethod
+  def for_channel(cls, channel, client_token=None):
+    """Alternate constructor to create a subscription from a channel.
+
+    Args:
+      channel: A apiclient.push.Channel describing the subscription to
+               create.
+      client_token: (optional) client token to verify the notification.
+
+    Returns:
+      New subscription object.
+    """
+    subscription = cls()
+    channel.write_header(subscription.headers)
+    if client_token is None:
+      client_token = new_token()
+    subscription.headers[SUBSCRIPTION_ID] = new_token()
+    subscription.headers[CLIENT_TOKEN] = client_token
+    return subscription
+
+  def verify(self, headers):
+    """Verifies that a webhook notification has the correct client_token.
+
+    Args:
+      headers: dict of request headers for a push notification.
+
+    Returns:
+      Boolean value indicating whether the notification is verified.
+    """
+    new_subscription = Subscription()
+    new_subscription.headers.read(headers)
+    return new_subscription.client_token == self.client_token
+
+  @property
+  def subscribe(self):
+    """Subscribe header value."""
+    return self.headers[SUBSCRIBE]
+
+  @property
+  def subscription_id(self):
+    """Subscription ID header value."""
+    return self.headers[SUBSCRIPTION_ID]
+
+  @property
+  def topic_id(self):
+    """Topic ID header value."""
+    return self.headers[TOPIC_ID]
+
+  @property
+  def topic_uri(self):
+    """Topic URI header value."""
+    return self.headers[TOPIC_URI]
+
+  @property
+  def client_token(self):
+    """Client Token header value."""
+    return self.headers[CLIENT_TOKEN]
+
+  @property
+  def event_type(self):
+    """Event Type header value."""
+    return self.headers[EVENT_TYPE]
+
+  @property
+  def unsubscribe(self):
+    """Unsubscribe header value."""
+    return self.headers[UNSUBSCRIBE]
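+
+# Illustrative sketch (not part of the original source); 'request' stands for
+# an apiclient HttpRequest and 'notification_headers' for the headers of an
+# incoming push notification:
+#
+#   channel = WebhookChannel('https://example.com/notify')
+#   subscription = Subscription.for_request(request, channel)
+#   request.execute()
+#   # later, when a notification arrives:
+#   subscription.verify(notification_headers)  # -> True or False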
diff --git a/apiclient/schema.py b/apiclient/schema.py
new file mode 100644
index 0000000..d076a86
--- /dev/null
+++ b/apiclient/schema.py
@@ -0,0 +1,312 @@
+# Copyright (C) 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Schema processing for discovery based APIs
+
+Schemas holds an APIs discovery schemas. It can return those schema as
+deserialized JSON objects, or pretty print them as prototype objects that
+conform to the schema.
+
+For example, given the schema:
+
+ schema = \"\"\"{
+   "Foo": {
+    "type": "object",
+    "properties": {
+     "etag": {
+      "type": "string",
+      "description": "ETag of the collection."
+     },
+     "kind": {
+      "type": "string",
+      "description": "Type of the collection ('calendar#acl').",
+      "default": "calendar#acl"
+     },
+     "nextPageToken": {
+      "type": "string",
+      "description": "Token used to access the next
+         page of this result. Omitted if no further results are available."
+     }
+    }
+   }
+ }\"\"\"
+
+ s = Schemas(schema)
+ print s.prettyPrintByName('Foo')
+
+ Produces the following output:
+
+  {
+   "nextPageToken": "A String", # Token used to access the
+       # next page of this result. Omitted if no further results are available.
+   "kind": "A String", # Type of the collection ('calendar#acl').
+   "etag": "A String", # ETag of the collection.
+  },
+
+The constructor takes a discovery document in which to look up named schema.
+"""
+
+# TODO(jcgregorio) support format, enum, minimum, maximum
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import copy
+
+from oauth2client import util
+from oauth2client.anyjson import simplejson
+
+
+class Schemas(object):
+  """Schemas for an API."""
+
+  def __init__(self, discovery):
+    """Constructor.
+
+    Args:
+      discovery: object, Deserialized discovery document from which we pull
+        out the named schema.
+    """
+    self.schemas = discovery.get('schemas', {})
+
+    # Cache of pretty printed schemas.
+    self.pretty = {}
+
+  @util.positional(2)
+  def _prettyPrintByName(self, name, seen=None, dent=0):
+    """Get pretty printed object prototype from the schema name.
+
+    Args:
+      name: string, Name of schema in the discovery document.
+      seen: list of string, Names of schema already seen. Used to handle
+        recursive definitions.
+
+    Returns:
+      string, A string that contains a prototype object with
+        comments that conforms to the given schema.
+    """
+    if seen is None:
+      seen = []
+
+    if name in seen:
+      # Do not fall into an infinite loop over recursive definitions.
+      return '# Object with schema name: %s' % name
+    seen.append(name)
+
+    if name not in self.pretty:
+      self.pretty[name] = _SchemaToStruct(self.schemas[name],
+          seen, dent=dent).to_str(self._prettyPrintByName)
+
+    seen.pop()
+
+    return self.pretty[name]
+
+  def prettyPrintByName(self, name):
+    """Get pretty printed object prototype from the schema name.
+
+    Args:
+      name: string, Name of schema in the discovery document.
+
+    Returns:
+      string, A string that contains a prototype object with
+        comments that conforms to the given schema.
+    """
+    # Return with trailing comma and newline removed.
+    return self._prettyPrintByName(name, seen=[], dent=1)[:-2]
+
+  @util.positional(2)
+  def _prettyPrintSchema(self, schema, seen=None, dent=0):
+    """Get pretty printed object prototype of schema.
+
+    Args:
+      schema: object, Parsed JSON schema.
+      seen: list of string, Names of schema already seen. Used to handle
+        recursive definitions.
+
+    Returns:
+      string, A string that contains a prototype object with
+        comments that conforms to the given schema.
+    """
+    if seen is None:
+      seen = []
+
+    return _SchemaToStruct(schema, seen,
+        dent=dent).to_str(self._prettyPrintByName)
+
+  def prettyPrintSchema(self, schema):
+    """Get pretty printed object prototype of schema.
+
+    Args:
+      schema: object, Parsed JSON schema.
+
+    Returns:
+      string, A string that contains a prototype object with
+        comments that conforms to the given schema.
+    """
+    # Return with trailing comma and newline removed.
+    return self._prettyPrintSchema(schema, dent=1)[:-2]
+
+  def get(self, name):
+    """Get deserialized JSON schema from the schema name.
+
+    Args:
+      name: string, Schema name.
+    """
+    return self.schemas[name]
+
+
+class _SchemaToStruct(object):
+  """Convert schema to a prototype object."""
+
+  @util.positional(3)
+  def __init__(self, schema, seen, dent=0):
+    """Constructor.
+
+    Args:
+      schema: object, Parsed JSON schema.
+      seen: list, List of names of schema already seen while parsing. Used to
+        handle recursive definitions.
+      dent: int, Initial indentation depth.
+    """
+    # The result of this parsing kept as list of strings.
+    self.value = []
+
+    # The final value of the parsing.
+    self.string = None
+
+    # The parsed JSON schema.
+    self.schema = schema
+
+    # Indentation level.
+    self.dent = dent
+
+    # Method that when called returns a prototype object for the schema with
+    # the given name.
+    self.from_cache = None
+
+    # List of names of schema already seen while parsing.
+    self.seen = seen
+
+  def emit(self, text):
+    """Add text as a line to the output.
+
+    Args:
+      text: string, Text to output.
+    """
+    self.value.extend(["  " * self.dent, text, '\n'])
+
+  def emitBegin(self, text):
+    """Add text to the output, but with no line terminator.
+
+    Args:
+      text: string, Text to output.
+    """
+    self.value.extend(["  " * self.dent, text])
+
+  def emitEnd(self, text, comment):
+    """Add text and comment to the output with line terminator.
+
+    Args:
+      text: string, Text to output.
+      comment: string, Python comment.
+    """
+    if comment:
+      divider = '\n' + '  ' * (self.dent + 2) + '# '
+      lines = comment.splitlines()
+      lines = [x.rstrip() for x in lines]
+      comment = divider.join(lines)
+      self.value.extend([text, ' # ', comment, '\n'])
+    else:
+      self.value.extend([text, '\n'])
+
+  def indent(self):
+    """Increase indentation level."""
+    self.dent += 1
+
+  def undent(self):
+    """Decrease indentation level."""
+    self.dent -= 1
+
+  def _to_str_impl(self, schema):
+    """Prototype object based on the schema, in Python code with comments.
+
+    Args:
+      schema: object, Parsed JSON schema file.
+
+    Returns:
+      Prototype object based on the schema, in Python code with comments.
+    """
+    stype = schema.get('type')
+    if stype == 'object':
+      self.emitEnd('{', schema.get('description', ''))
+      self.indent()
+      if 'properties' in schema:
+        for pname, pschema in schema.get('properties', {}).iteritems():
+          self.emitBegin('"%s": ' % pname)
+          self._to_str_impl(pschema)
+      elif 'additionalProperties' in schema:
+        self.emitBegin('"a_key": ')
+        self._to_str_impl(schema['additionalProperties'])
+      self.undent()
+      self.emit('},')
+    elif '$ref' in schema:
+      schemaName = schema['$ref']
+      description = schema.get('description', '')
+      s = self.from_cache(schemaName, seen=self.seen)
+      parts = s.splitlines()
+      self.emitEnd(parts[0], description)
+      for line in parts[1:]:
+        self.emit(line.rstrip())
+    elif stype == 'boolean':
+      value = schema.get('default', 'True or False')
+      self.emitEnd('%s,' % str(value), schema.get('description', ''))
+    elif stype == 'string':
+      value = schema.get('default', 'A String')
+      self.emitEnd('"%s",' % str(value), schema.get('description', ''))
+    elif stype == 'integer':
+      value = schema.get('default', '42')
+      self.emitEnd('%s,' % str(value), schema.get('description', ''))
+    elif stype == 'number':
+      value = schema.get('default', '3.14')
+      self.emitEnd('%s,' % str(value), schema.get('description', ''))
+    elif stype == 'null':
+      self.emitEnd('None,', schema.get('description', ''))
+    elif stype == 'any':
+      self.emitEnd('"",', schema.get('description', ''))
+    elif stype == 'array':
+      self.emitEnd('[', schema.get('description'))
+      self.indent()
+      self.emitBegin('')
+      self._to_str_impl(schema['items'])
+      self.undent()
+      self.emit('],')
+    else:
+      self.emit('Unknown type! %s' % stype)
+      self.emitEnd('', '')
+
+    self.string = ''.join(self.value)
+    return self.string
+
+  def to_str(self, from_cache):
+    """Prototype object based on the schema, in Python code with comments.
+
+    Args:
+      from_cache: callable(name, seen), Callable that retrieves an object
+         prototype for a schema with the given name. Seen is a list of schema
+         names already seen as we recursively descend the schema definition.
+
+    Returns:
+      Prototype object based on the schema, in Python code with comments.
+      The lines of the code will all be properly indented.
+    """
+    self.from_cache = from_cache
+    return self._to_str_impl(self.schema)
diff --git a/dev_appserver.py b/dev_appserver.py
index dd75b28..1c48e39 100644
--- a/dev_appserver.py
+++ b/dev_appserver.py
@@ -114,6 +114,8 @@
     ]
 
 _PHP_RUNTIME_PATHS = [
+    _DIR_PATH,
+
     os.path.join(_DIR_PATH, 'lib', 'concurrent'),
     os.path.join(_DIR_PATH, 'lib', 'cherrypy'),
     os.path.join(_DIR_PATH, 'lib', 'yaml-3.10'),
diff --git a/gflags/__init__.py b/gflags/__init__.py
new file mode 100644
index 0000000..77e4285
--- /dev/null
+++ b/gflags/__init__.py
@@ -0,0 +1 @@
+from gflags import *
diff --git a/gflags/gflags.py b/gflags/gflags.py
new file mode 100644
index 0000000..822256a
--- /dev/null
+++ b/gflags/gflags.py
@@ -0,0 +1,2862 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2002, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# ---
+# Author: Chad Lester
+# Design and style contributions by:
+#   Amit Patel, Bogdan Cocosel, Daniel Dulitz, Eric Tiedemann,
+#   Eric Veach, Laurence Gonsalves, Matthew Springer
+# Code reorganized a bit by Craig Silverstein
+
+"""This module is used to define and parse command line flags.
+
+This module defines a *distributed* flag-definition policy: rather than
+an application having to define all flags in or near main(), each python
+module defines flags that are useful to it.  When one python module
+imports another, it gains access to the other's flags.  (This is
+implemented by having all modules share a common, global registry object
+containing all the flag information.)
+
+Flags are defined through the use of one of the DEFINE_xxx functions.
+The specific function used determines how the flag is parsed, checked,
+and optionally type-converted, when it's seen on the command line.
+
+
+IMPLEMENTATION: DEFINE_* creates a 'Flag' object and registers it with a
+'FlagValues' object (typically the global FlagValues FLAGS, defined
+here).  The 'FlagValues' object can scan the command line arguments and
+pass flag arguments to the corresponding 'Flag' objects for
+value-checking and type conversion.  The converted flag values are
+available as attributes of the 'FlagValues' object.
+
+Code can access the flag through a FlagValues object, for instance
+gflags.FLAGS.myflag.  Typically, the __main__ module passes the command
+line arguments to gflags.FLAGS for parsing.
+
+At bottom, this module calls getopt(), so getopt functionality is
+supported, including short- and long-style flags, and the use of -- to
+terminate flags.
+
+Methods defined by the flag module will throw 'FlagsError' exceptions.
+The exception argument will be a human-readable string.
+
+
+FLAG TYPES: This is a list of the DEFINE_*'s that you can do.  All flags
+take a name, default value, help-string, and optional 'short' name
+(one-letter name).  Some flags have other arguments, which are described
+with the flag.
+
+DEFINE_string: takes any input, and interprets it as a string.
+
+DEFINE_bool or
+DEFINE_boolean: typically does not take an argument: say --myflag to
+                set FLAGS.myflag to true, or --nomyflag to set
+                FLAGS.myflag to false.  Alternately, you can say
+                   --myflag=true  or --myflag=t or --myflag=1  or
+                   --myflag=false or --myflag=f or --myflag=0
+
+DEFINE_float: takes an input and interprets it as a floating point
+              number.  Takes optional args lower_bound and upper_bound;
+              if the number specified on the command line is out of
+              range, it will raise a FlagError.
+
+DEFINE_integer: takes an input and interprets it as an integer.  Takes
+                optional args lower_bound and upper_bound as for floats.
+
+DEFINE_enum: takes a list of strings which represents legal values.  If
+             the command-line value is not in this list, raise a flag
+             error.  Otherwise, assign to FLAGS.flag as a string.
+
+DEFINE_list: Takes a comma-separated list of strings on the commandline.
+             Stores them in a python list object.
+
+DEFINE_spaceseplist: Takes a space-separated list of strings on the
+                     commandline.  Stores them in a python list object.
+                     Example: --myspacesepflag "foo bar baz"
+
+DEFINE_multistring: The same as DEFINE_string, except the flag can be
+                    specified more than once on the commandline.  The
+                    result is a python list object (list of strings),
+                    even if the flag is only on the command line once.
+
+DEFINE_multi_int: The same as DEFINE_integer, except the flag can be
+                  specified more than once on the commandline.  The
+                  result is a python list object (list of ints), even if
+                  the flag is only on the command line once.
+
+
+SPECIAL FLAGS: There are a few flags that have special meaning:
+   --help          prints a list of all the flags in a human-readable fashion
+   --helpshort     prints a list of all key flags (see below).
+   --helpxml       prints a list of all flags, in XML format.  DO NOT parse
+                   the output of --help and --helpshort.  Instead, parse
+                   the output of --helpxml.  For more info, see
+                   "OUTPUT FOR --helpxml" below.
+   --flagfile=foo  read flags from file foo.
+   --undefok=f1,f2 ignore unrecognized option errors for f1,f2.
+                   For boolean flags, you should use --undefok=boolflag, and
+                   --boolflag and --noboolflag will be accepted.  Do not use
+                   --undefok=noboolflag.
+   --              as in getopt(), terminates flag-processing
+
+
+FLAGS VALIDATORS: If your program:
+  - requires flag X to be specified
+  - needs flag Y to match a regular expression
+  - or requires any more general constraint to be satisfied
+then validators are for you!
+
+Each validator represents a constraint over one flag, which is enforced
+starting from the initial parsing of the flags and until the program
+terminates.
+
+Also, lower_bound and upper_bound for numerical flags are enforced using flag
+validators.
+
+Howto:
+If you want to enforce a constraint over one flag, use
+
+gflags.RegisterValidator(flag_name,
+                        checker,
+                        message='Flag validation failed',
+                        flag_values=FLAGS)
+
+After flag values are initially parsed, and after any change to the specified
+flag, method checker(flag_value) will be executed. If constraint is not
+satisfied, an IllegalFlagValue exception will be raised. See
+RegisterValidator's docstring for a detailed explanation on how to construct
+your own checker.
+
+
+EXAMPLE USAGE:
+
+FLAGS = gflags.FLAGS
+
+gflags.DEFINE_integer('my_version', 0, 'Version number.')
+gflags.DEFINE_string('filename', None, 'Input file name', short_name='f')
+
+gflags.RegisterValidator('my_version',
+                        lambda value: value % 2 == 0,
+                        message='--my_version must be divisible by 2')
+gflags.MarkFlagAsRequired('filename')
+
+
+NOTE ON --flagfile:
+
+Flags may be loaded from text files in addition to being specified on
+the commandline.
+
+Any flags you don't feel like typing, throw them in a file, one flag per
+line, for instance:
+   --myflag=myvalue
+   --nomyboolean_flag
+You then specify your file with the special flag '--flagfile=somefile'.
+You CAN recursively nest flagfile= tokens OR use multiple files on the
+command line.  Lines beginning with a single hash '#' or a double slash
+'//' are comments in your flagfile.
+
+Any flagfile=<file> will be interpreted as having a relative path from
+the current working directory rather than from the place the file was
+included from:
+   myPythonScript.py --flagfile=config/somefile.cfg
+
+If somefile.cfg includes further --flagfile= directives, these will be
+referenced relative to the original CWD, not from the directory the
+including flagfile was found in!
+
+The caveat applies to people who are including a series of nested files
+in a different dir than they are executing out of.  Relative path names
+are always from CWD, not from the directory of the parent include
+flagfile. We do now support '~' expanded directory names.
+
+Absolute path names ALWAYS work!
+
+
+EXAMPLE USAGE:
+
+
+  FLAGS = gflags.FLAGS
+
+  # Flag names are globally defined!  So in general, we need to be
+  # careful to pick names that are unlikely to be used by other libraries.
+  # If there is a conflict, we'll get an error at import time.
+  gflags.DEFINE_string('name', 'Mr. President', 'your name')
+  gflags.DEFINE_integer('age', None, 'your age in years', lower_bound=0)
+  gflags.DEFINE_boolean('debug', False, 'produces debugging output')
+  gflags.DEFINE_enum('gender', 'male', ['male', 'female'], 'your gender')
+
+  def main(argv):
+    try:
+      argv = FLAGS(argv)  # parse flags
+    except gflags.FlagsError, e:
+      print '%s\\nUsage: %s ARGS\\n%s' % (e, sys.argv[0], FLAGS)
+      sys.exit(1)
+    if FLAGS.debug: print 'non-flag arguments:', argv
+    print 'Happy Birthday', FLAGS.name
+    if FLAGS.age is not None:
+      print 'You are a %d year old %s' % (FLAGS.age, FLAGS.gender)
+
+  if __name__ == '__main__':
+    main(sys.argv)
+
+
+KEY FLAGS:
+
+As we already explained, each module gains access to all flags defined
+by all the other modules it transitively imports.  In the case of
+non-trivial scripts, this means a lot of flags ...  For documentation
+purposes, it is good to identify the flags that are key (i.e., really
+important) to a module.  Clearly, the concept of "key flag" is a
+subjective one.  When trying to determine whether a flag is key to a
+module or not, assume that you are trying to explain your module to a
+potential user: which flags would you really like to mention first?
+
+We'll describe shortly how to declare which flags are key to a module.
+For the moment, assume we know the set of key flags for each module.
+Then, if you use the app.py module, you can use the --helpshort flag to
+print only the help for the flags that are key to the main module, in a
+human-readable format.
+
+NOTE: If you need to parse the flag help, do NOT use the output of
+--help / --helpshort.  That output is meant for human consumption, and
+may be changed in the future.  Instead, use --helpxml; flags that are
+key for the main module are marked there with a <key>yes</key> element.
+
+The set of key flags for a module M is composed of:
+
+1. Flags defined by module M by calling a DEFINE_* function.
+
+2. Flags that module M explicitly declares as key by using the function
+
+     DECLARE_key_flag(<flag_name>)
+
+3. Key flags of other modules that M specifies by using the function
+
+     ADOPT_module_key_flags(<other_module>)
+
+   This is a "bulk" declaration of key flags: each flag that is key for
+   <other_module> becomes key for the current module too.
+
+Notice that if you do not use the functions described at points 2 and 3
+above, then --helpshort prints information only about the flags defined
+by the main module of our script.  In many cases, this behavior is good
+enough.  But if you move part of the main module code (together with the
+related flags) into a different module, then it is nice to use
+DECLARE_key_flag / ADOPT_module_key_flags and make sure --helpshort
+lists all relevant flags (otherwise, your code refactoring may confuse
+your users).
+
+Note: each of DECLARE_key_flag / ADOPT_module_key_flags has its own
+pluses and minuses: DECLARE_key_flag is more targeted and may lead to a
+more focused --helpshort documentation.  ADOPT_module_key_flags is good
+for cases when an entire module is considered key to the current script.
+Also, it does not require updates to client scripts when a new flag is
+added to the module.
+
+
+EXAMPLE USAGE 2 (WITH KEY FLAGS):
+
+Consider an application that contains the following three files (two
+auxiliary modules and a main module)
+
+File libfoo.py:
+
+  import gflags
+
+  gflags.DEFINE_integer('num_replicas', 3, 'Number of replicas to start')
+  gflags.DEFINE_boolean('rpc2', True, 'Turn on the usage of RPC2.')
+
+  ... some code ...
+
+File libbar.py:
+
+  import gflags
+
+  gflags.DEFINE_string('bar_gfs_path', '/gfs/path',
+                      'Path to the GFS files for libbar.')
+  gflags.DEFINE_string('email_for_bar_errors', 'bar-team@google.com',
+                      'Email address for bug reports about module libbar.')
+  gflags.DEFINE_boolean('bar_risky_hack', False,
+                       'Turn on an experimental and buggy optimization.')
+
+  ... some code ...
+
+File myscript.py:
+
+  import gflags
+  import libfoo
+  import libbar
+
+  gflags.DEFINE_integer('num_iterations', 0, 'Number of iterations.')
+
+  # Declare that all flags that are key for libfoo are
+  # key for this module too.
+  gflags.ADOPT_module_key_flags(libfoo)
+
+  # Declare that the flag --bar_gfs_path (defined in libbar) is key
+  # for this module.
+  gflags.DECLARE_key_flag('bar_gfs_path')
+
+  ... some code ...
+
+When myscript is invoked with the flag --helpshort, the resulting help
+message lists information about all the key flags for myscript:
+--num_iterations, --num_replicas, --rpc2, and --bar_gfs_path.
+
+Of course, myscript uses all the flags declared by it (in this case,
+just --num_iterations) or by any of the modules it transitively imports
+(e.g., the modules libfoo, libbar).  E.g., it can access the value of
+FLAGS.bar_risky_hack, even if --bar_risky_hack is not declared as a key
+flag for myscript.
+
+
+OUTPUT FOR --helpxml:
+
+The --helpxml flag generates output with the following structure:
+
+<?xml version="1.0"?>
+<AllFlags>
+  <program>PROGRAM_BASENAME</program>
+  <usage>MAIN_MODULE_DOCSTRING</usage>
+  (<flag>
+    [<key>yes</key>]
+    <file>DECLARING_MODULE</file>
+    <name>FLAG_NAME</name>
+    <meaning>FLAG_HELP_MESSAGE</meaning>
+    <default>DEFAULT_FLAG_VALUE</default>
+    <current>CURRENT_FLAG_VALUE</current>
+    <type>FLAG_TYPE</type>
+    [OPTIONAL_ELEMENTS]
+  </flag>)*
+</AllFlags>
+
+Notes:
+
+1. The output is intentionally similar to the output generated by the
+C++ command-line flag library.  The few differences are due to the
+Python flags that do not have a C++ equivalent (at least not yet),
+e.g., DEFINE_list.
+
+2. New XML elements may be added in the future.
+
+3. DEFAULT_FLAG_VALUE is in serialized form, i.e., the string you can
+pass for this flag on the command-line.  E.g., for a flag defined
+using DEFINE_list, this field may be foo,bar, not ['foo', 'bar'].
+
+4. CURRENT_FLAG_VALUE is produced using str().  This means that the
+string 'false' will be represented in the same way as the boolean
+False.  Using repr() would have removed this ambiguity and simplified
+parsing, but would have broken the compatibility with the C++
+command-line flags.
+
+5. OPTIONAL_ELEMENTS describe elements relevant for certain kinds of
+flags: lower_bound, upper_bound (for flags that specify bounds),
+enum_value (for enum flags), list_separator (for flags that consist of
+a list of values, separated by a special token).
+
+6. We do not provide any example here: please use --helpxml instead.
+
+This module requires at least python 2.2.1 to run.
+"""
+
+import cgi
+import getopt
+import os
+import re
+import string
+import struct
+import sys
+# pylint: disable-msg=C6204
+try:
+  import fcntl
+except ImportError:
+  fcntl = None
+try:
+  # Importing termios will fail on non-unix platforms.
+  import termios
+except ImportError:
+  termios = None
+
+import gflags_validators
+# pylint: enable-msg=C6204
+
+
+# Are we running under pychecker?
+_RUNNING_PYCHECKER = 'pychecker.python' in sys.modules
+
+
+def _GetCallingModuleObjectAndName():
+  """Returns the module that's calling into this module.
+
+  We generally use this function to get the name of the module calling a
+  DEFINE_foo... function.
+  """
+  # Walk down the stack to find the first globals dict that's not ours.
+  for depth in range(1, sys.getrecursionlimit()):
+    if not sys._getframe(depth).f_globals is globals():
+      globals_for_frame = sys._getframe(depth).f_globals
+      module, module_name = _GetModuleObjectAndName(globals_for_frame)
+      if module_name is not None:
+        return module, module_name
+  raise AssertionError("No module was found")
+
+
+def _GetCallingModule():
+  """Returns the name of the module that's calling into this module."""
+  return _GetCallingModuleObjectAndName()[1]
+
+
+def _GetThisModuleObjectAndName():
+  """Returns: (module object, module name) for this module."""
+  return _GetModuleObjectAndName(globals())
+
+
+# module exceptions:
+class FlagsError(Exception):
+  """The base class for all flags errors."""
+  pass
+
+
+class DuplicateFlag(FlagsError):
+  """Raised if there is a flag naming conflict."""
+  pass
+
+
+class CantOpenFlagFileError(FlagsError):
+  """Raised if flagfile fails to open: doesn't exist, wrong permissions, etc."""
+  pass
+
+
+class DuplicateFlagCannotPropagateNoneToSwig(DuplicateFlag):
+  """Special case of DuplicateFlag -- SWIG flag value can't be set to None.
+
+  This can be raised when a duplicate flag is created. Even if allow_override is
+  True, we still abort if the new value is None, because it's currently
+  impossible to pass None default value back to SWIG. See FlagValues.SetDefault
+  for details.
+  """
+  pass
+
+
+class DuplicateFlagError(DuplicateFlag):
+  """A DuplicateFlag whose message cites the conflicting definitions.
+
+  A DuplicateFlagError conveys more information than a DuplicateFlag,
+  namely the modules where the conflicting definitions occur. This
+  class was created to avoid breaking external modules which depend on
+  the existing DuplicateFlags interface.
+  """
+
+  def __init__(self, flagname, flag_values, other_flag_values=None):
+    """Create a DuplicateFlagError.
+
+    Args:
+      flagname: Name of the flag being redefined.
+      flag_values: FlagValues object containing the first definition of
+          flagname.
+      other_flag_values: If this argument is not None, it should be the
+          FlagValues object where the second definition of flagname occurs.
+          If it is None, we assume that we're being called when attempting
+          to create the flag a second time, and we use the module calling
+          this one as the source of the second definition.
+    """
+    self.flagname = flagname
+    first_module = flag_values.FindModuleDefiningFlag(
+        flagname, default='<unknown>')
+    if other_flag_values is None:
+      second_module = _GetCallingModule()
+    else:
+      second_module = other_flag_values.FindModuleDefiningFlag(
+          flagname, default='<unknown>')
+    msg = "The flag '%s' is defined twice. First from %s, Second from %s" % (
+        self.flagname, first_module, second_module)
+    DuplicateFlag.__init__(self, msg)
+
+
+class IllegalFlagValue(FlagsError):
+  """The flag command line argument is illegal."""
+  pass
+
+
+class UnrecognizedFlag(FlagsError):
+  """Raised if a flag is unrecognized."""
+  pass
+
+
+# An UnrecognizedFlagError conveys more information than an UnrecognizedFlag.
+# Since there are external modules that create UnrecognizedFlags, the
+# interface to UnrecognizedFlag shouldn't change.  The flagvalue will be
+# assigned the full value of the flag and its argument, if any, allowing
+# handling of unrecognized flags in an exception handler.
+# If flagvalue is the empty string, then this exception is due to a
+# reference to a flag that was not already defined.
+class UnrecognizedFlagError(UnrecognizedFlag):
+  def __init__(self, flagname, flagvalue=''):
+    self.flagname = flagname
+    self.flagvalue = flagvalue
+    UnrecognizedFlag.__init__(
+        self, "Unknown command line flag '%s'" % flagname)
+
+# Global variable used by expvar
+_exported_flags = {}
+_help_width = 80  # width of help output
+
+
+def GetHelpWidth():
+  """Returns: an integer, the width of help lines that is used in TextWrap."""
+  if (not sys.stdout.isatty()) or (termios is None) or (fcntl is None):
+    return _help_width
+  try:
+    data = fcntl.ioctl(sys.stdout, termios.TIOCGWINSZ, '1234')
+    columns = struct.unpack('hh', data)[1]
+    # Emacs mode returns 0.
+    # Here we assume that any value below 40 is unreasonable.
+    if columns >= 40:
+      return columns
+    # Returning an int as the default is fine; int() of an int is a no-op.
+    return int(os.getenv('COLUMNS', _help_width))
+
+  except (TypeError, IOError, struct.error):
+    return _help_width
+
+
+def CutCommonSpacePrefix(text):
+  """Removes a common space prefix from the lines of a multiline text.
+
+  If the first line does not start with a space, it is left untouched
+  and the common space prefix is searched for only in the remaining
+  lines.  This is especially useful for turning doc strings into help
+  texts, because some people start the doc comment right after the
+  opening quotes and align the following lines, while others put the
+  quotes on a separate line.
+
+  The function also drops trailing empty lines and ignores empty lines
+  following the initial content line while calculating the initial
+  common whitespace.
+
+  Args:
+    text: text to work on
+
+  Returns:
+    the resulting text
+  """
+  text_lines = text.splitlines()
+  # Drop trailing empty lines
+  while text_lines and not text_lines[-1]:
+    text_lines = text_lines[:-1]
+  if text_lines:
+    # We got some content, is the first line starting with a space?
+    if text_lines[0] and text_lines[0][0].isspace():
+      text_first_line = []
+    else:
+      text_first_line = [text_lines.pop(0)]
+    # Calculate length of common leading whitespace (only over content lines)
+    common_prefix = os.path.commonprefix([line for line in text_lines if line])
+    space_prefix_len = len(common_prefix) - len(common_prefix.lstrip())
+    # If we have a common space prefix, drop it from all lines
+    if space_prefix_len:
+      for index in xrange(len(text_lines)):
+        if text_lines[index]:
+          text_lines[index] = text_lines[index][space_prefix_len:]
+    return '\n'.join(text_first_line + text_lines)
+  return ''
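+
+
+# Example (illustrative): given a docstring whose continuation lines share a
+# two-space prefix,
+#
+#   CutCommonSpacePrefix('First line.\n  indented\n    more indented')
+#
+# returns 'First line.\nindented\n  more indented': the first line is kept
+# as-is and the common two-space prefix is dropped from the rest.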
+
+
+def TextWrap(text, length=None, indent='', firstline_indent=None, tabs='    '):
+  """Wraps a given text to a maximum line length and returns it.
+
+  We turn lines that only contain whitespace into empty lines.  We keep
+  new lines and tabs (e.g., we do not treat tabs as spaces).
+
+  Args:
+    text:             text to wrap
+    length:           maximum length of a line, including indentation;
+                      if this is None then use GetHelpWidth()
+    indent:           indent for all but first line
+    firstline_indent: indent for first line; if None, fall back to indent
+    tabs:             replacement for tabs
+
+  Returns:
+    wrapped text
+
+  Raises:
+    FlagsError: if indent not shorter than length
+    FlagsError: if firstline_indent not shorter than length
+  """
+  # Get defaults where the caller used None.
+  if length is None:
+    length = GetHelpWidth()
+  if indent is None:
+    indent = ''
+  if len(indent) >= length:
+    raise FlagsError('Indent must be shorter than length')
+  # In line we will be holding the current line which is to be started
+  # with indent (or firstline_indent if available) and then appended
+  # with words.
+  if firstline_indent is None:
+    firstline_indent = ''
+    line = indent
+  else:
+    line = firstline_indent
+    if len(firstline_indent) >= length:
+      raise FlagsError('First line indent must be shorter than length')
+
+  # If the caller does not care about tabs we simply convert them to
+  # spaces.  If the caller wanted tabs to be single spaces then we do
+  # that here as well.
+  if not tabs or tabs == ' ':
+    text = text.replace('\t', ' ')
+  else:
+    tabs_are_whitespace = not tabs.strip()
+
+  line_regex = re.compile('([ ]*)(\t*)([^ \t]+)', re.MULTILINE)
+
+  # Split the text into lines, then split each line with the regex
+  # above.  The resulting lines are collected in result[].  For each
+  # split we get the spaces, the tabs and the next non-whitespace word.
+  result = []
+  for text_line in text.splitlines():
+    # Store result length so we can find out whether processing the next
+    # line gave any new content
+    old_result_len = len(result)
+    # Process next line with line_regex. For optimization we do an rstrip().
+    # - process tabs (changes either line or word, see below)
+    # - process word (first try to squeeze on line, then wrap or force wrap)
+    # Spaces found on the line are ignored, they get added while wrapping as
+    # needed.
+    for spaces, current_tabs, word in line_regex.findall(text_line.rstrip()):
+      # If tabs weren't converted to spaces, handle them now
+      if current_tabs:
+        # If the last thing we added was a space anyway then drop
+        # it. But let's not get rid of the indentation.
+        if (((result and line != indent) or
+             (not result and line != firstline_indent)) and line[-1] == ' '):
+          line = line[:-1]
+        # Add the tabs: if that means adding whitespace, just add it to
+        # the line; the rstrip() code will shorten the line later if
+        # necessary.
+        if tabs_are_whitespace:
+          line += tabs * len(current_tabs)
+        else:
+          # if not all tab replacement is whitespace we prepend it to the word
+          word = tabs * len(current_tabs) + word
+      # Handle the case where word cannot be squeezed onto current last line
+      if len(line) + len(word) > length and len(indent) + len(word) <= length:
+        result.append(line.rstrip())
+        line = indent + word
+        word = ''
+        # Is there room left on the line, i.e. can we append a space?
+        if len(line) + 1 >= length:
+          result.append(line.rstrip())
+          line = indent
+        else:
+          line += ' '
+      # Add word and shorten it up to the allowed line length.  Restart
+      # the next line with indent and repeat, or add a space if we're
+      # done (word finished).  This deals with words that cannot fit on
+      # one line (e.g. indent + word longer than allowed line length).
+      while len(line) + len(word) >= length:
+        line += word
+        result.append(line[:length])
+        word = line[length:]
+        line = indent
+      # Default case, simply append the word and a space
+      if word:
+        line += word + ' '
+    # End of input line.  If we have content we finish the line.  If the
+    # current line is just the indent but we had content during this
+    # original line then we need to add an empty line.
+    if (result and line != indent) or (not result and line != firstline_indent):
+      result.append(line.rstrip())
+    elif len(result) == old_result_len:
+      result.append('')
+    line = indent
+
+  return '\n'.join(result)
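+
+
+# Example (illustrative): wrapping help text at 40 columns with a four-space
+# hanging indent,
+#
+#   TextWrap('a very long help string ...', length=40, indent='    ',
+#            firstline_indent='')
+#
+# yields a first line starting flush left and continuation lines indented by
+# four spaces, each line at most 40 characters wide.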
+
+
+def DocToHelp(doc):
+  """Takes a __doc__ string and reformats it as help."""
+
+  # Get rid of leading and trailing whitespace.  Note that lstrip() or
+  # strip() may drop more than just the first line's leading space and
+  # the last line's trailing space.
+  doc = doc.strip()
+
+  # Get rid of all empty lines
+  whitespace_only_line = re.compile('^[ \t]+$', re.M)
+  doc = whitespace_only_line.sub('', doc)
+
+  # Cut out common space at line beginnings
+  doc = CutCommonSpacePrefix(doc)
+
+  # Just like this module's comment, comments tend to be aligned somehow;
+  # in other words, they all start with the same amount of whitespace.
+  # 1) keep double newlines
+  # 2) keep whitespace after a newline if the line is not empty
+  # 3) change all other newlines to a space
+  # Solution: match newlines between non-whitespace and replace with a space.
+  # (re.sub's fourth positional argument is a count, not flags, so re.M must
+  # not be passed there; this pattern needs no flags anyway.)
+  doc = re.sub(r'(?<=\S)\n(?=\S)', ' ', doc)
+
+  return doc
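+
+
+# Example (illustrative): DocToHelp('\n  Frobs the baz.\n\n  Extra\n  detail.')
+# returns 'Frobs the baz.\n\nExtra detail.': paragraph breaks survive while
+# single newlines inside a paragraph collapse to spaces.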
+
+
+def _GetModuleObjectAndName(globals_dict):
+  """Returns the module that defines a global environment, and its name.
+
+  Args:
+    globals_dict: A dictionary that should correspond to an environment
+      providing the values of the globals.
+
+  Returns:
+    A pair consisting of (1) module object and (2) module name (a
+    string).  Returns (None, None) if the module could not be
+    identified.
+  """
+  # The use of .items() (instead of .iteritems()) is NOT a mistake: if
+  # a parallel thread imports a module while we iterate over
+  # .iteritems() (not nice, but possible), we get a RuntimeError ...
+  # Hence, we use the slightly slower but safer .items().
+  for name, module in sys.modules.items():
+    if getattr(module, '__dict__', None) is globals_dict:
+      if name == '__main__':
+        # Pick a more informative name for the main module.
+        name = sys.argv[0]
+      return (module, name)
+  return (None, None)
+
+
+def _GetMainModule():
+  """Returns: string, name of the module from which execution started."""
+  # First, try to use the same logic used by _GetCallingModuleObjectAndName(),
+  # i.e., call _GetModuleObjectAndName().  For that we first need to
+  # find the dictionary that the main module uses to store the
+  # globals.
+  #
+  # That's (normally) the same dictionary object that the deepest
+  # (oldest) stack frame is using for globals.
+  deepest_frame = sys._getframe(0)
+  while deepest_frame.f_back is not None:
+    deepest_frame = deepest_frame.f_back
+  globals_for_main_module = deepest_frame.f_globals
+  main_module_name = _GetModuleObjectAndName(globals_for_main_module)[1]
+  # The above strategy fails in some cases (e.g., tools that compute
+  # code coverage by redefining, among other things, the main module).
+  # If so, just use sys.argv[0].  We can probably always do this, but
+  # it's safest to try to use the same logic as _GetCallingModuleObjectAndName()
+  if main_module_name is None:
+    main_module_name = sys.argv[0]
+  return main_module_name
+
+
+class FlagValues:
+  """Registry of 'Flag' objects.
+
+  A 'FlagValues' can then scan command line arguments, passing flag
+  arguments through to the 'Flag' objects that it owns.  It also
+  provides easy access to the flag values.  Typically only one
+  'FlagValues' object is needed by an application: gflags.FLAGS
+
+  This class is heavily overloaded:
+
+  'Flag' objects are registered via __setitem__:
+       FLAGS['longname'] = x   # register a new flag
+
+  The .value attribute of the registered 'Flag' objects can be accessed
+  as attributes of this 'FlagValues' object, through __getattr__.  Both
+  the long and short name of the original 'Flag' objects can be used to
+  access its value:
+       FLAGS.longname          # parsed flag value
+       FLAGS.x                 # parsed flag value (short name)
+
+  Command line arguments are scanned and passed to the registered 'Flag'
+  objects through the __call__ method.  Unparsed arguments, including
+  argv[0] (e.g. the program name) are returned.
+       argv = FLAGS(sys.argv)  # scan command line arguments
+
+  The original registered Flag objects can be retrieved through the use
+  of the dictionary-like operator, __getitem__:
+       x = FLAGS['longname']   # access the registered Flag object
+
+  The str() operator of a 'FlagValues' object provides help for all of
+  the registered 'Flag' objects.
+  """
+
+  def __init__(self):
+    # Since everything in this class is so heavily overloaded, the only
+    # way of defining and using fields is to access __dict__ directly.
+
+    # Dictionary: flag name (string) -> Flag object.
+    self.__dict__['__flags'] = {}
+    # Dictionary: module name (string) -> list of Flag objects that are defined
+    # by that module.
+    self.__dict__['__flags_by_module'] = {}
+    # Dictionary: module id (int) -> list of Flag objects that are defined by
+    # that module.
+    self.__dict__['__flags_by_module_id'] = {}
+    # Dictionary: module name (string) -> list of Flag objects that are
+    # key for that module.
+    self.__dict__['__key_flags_by_module'] = {}
+
+    # Set if we should use new style gnu_getopt rather than getopt when parsing
+    # the args.  Only possible with Python 2.3+
+    self.UseGnuGetOpt(False)
+
+  def UseGnuGetOpt(self, use_gnu_getopt=True):
+    """Use GNU-style scanning. Allows mixing of flag and non-flag arguments.
+
+    See http://docs.python.org/library/getopt.html#getopt.gnu_getopt
+
+    Args:
+      use_gnu_getopt: whether or not to use GNU-style scanning.
+    """
+    self.__dict__['__use_gnu_getopt'] = use_gnu_getopt
+
+  def IsGnuGetOpt(self):
+    return self.__dict__['__use_gnu_getopt']
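+
+  # Illustrative difference (assuming a defined flag named 'foo'): with the
+  # default getopt-style scanning, FLAGS(['prog', 'cmd', '--foo=1']) stops at
+  # the first non-flag and returns ['prog', 'cmd', '--foo=1'] with '--foo=1'
+  # left unparsed; after FLAGS.UseGnuGetOpt(), '--foo=1' is parsed and only
+  # ['prog', 'cmd'] is returned.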
+
+  def FlagDict(self):
+    return self.__dict__['__flags']
+
+  def FlagsByModuleDict(self):
+    """Returns the dictionary of module_name -> list of defined flags.
+
+    Returns:
+      A dictionary.  Its keys are module names (strings).  Its values
+      are lists of Flag objects.
+    """
+    return self.__dict__['__flags_by_module']
+
+  def FlagsByModuleIdDict(self):
+    """Returns the dictionary of module_id -> list of defined flags.
+
+    Returns:
+      A dictionary.  Its keys are module IDs (ints).  Its values
+      are lists of Flag objects.
+    """
+    return self.__dict__['__flags_by_module_id']
+
+  def KeyFlagsByModuleDict(self):
+    """Returns the dictionary of module_name -> list of key flags.
+
+    Returns:
+      A dictionary.  Its keys are module names (strings).  Its values
+      are lists of Flag objects.
+    """
+    return self.__dict__['__key_flags_by_module']
+
+  def _RegisterFlagByModule(self, module_name, flag):
+    """Records the module that defines a specific flag.
+
+    We keep track of which flag is defined by which module so that we
+    can later sort the flags by module.
+
+    Args:
+      module_name: A string, the name of a Python module.
+      flag: A Flag object, a flag that is key to the module.
+    """
+    flags_by_module = self.FlagsByModuleDict()
+    flags_by_module.setdefault(module_name, []).append(flag)
+
+  def _RegisterFlagByModuleId(self, module_id, flag):
+    """Records the module that defines a specific flag.
+
+    Args:
+      module_id: An int, the ID of the Python module.
+      flag: A Flag object, a flag that is key to the module.
+    """
+    flags_by_module_id = self.FlagsByModuleIdDict()
+    flags_by_module_id.setdefault(module_id, []).append(flag)
+
+  def _RegisterKeyFlagForModule(self, module_name, flag):
+    """Specifies that a flag is a key flag for a module.
+
+    Args:
+      module_name: A string, the name of a Python module.
+      flag: A Flag object, a flag that is key to the module.
+    """
+    key_flags_by_module = self.KeyFlagsByModuleDict()
+    # The list of key flags for the module named module_name.
+    key_flags = key_flags_by_module.setdefault(module_name, [])
+    # Add flag, but avoid duplicates.
+    if flag not in key_flags:
+      key_flags.append(flag)
+
+  def _GetFlagsDefinedByModule(self, module):
+    """Returns the list of flags defined by a module.
+
+    Args:
+      module: A module object or a module name (a string).
+
+    Returns:
+      A new list of Flag objects.  The caller may update this list
+      freely: none of those changes will affect the internals of this
+      FlagValues object.
+    """
+    if not isinstance(module, str):
+      module = module.__name__
+
+    return list(self.FlagsByModuleDict().get(module, []))
+
+  def _GetKeyFlagsForModule(self, module):
+    """Returns the list of key flags for a module.
+
+    Args:
+      module: A module object or a module name (a string)
+
+    Returns:
+      A new list of Flag objects.  The caller may update this list
+      freely: none of those changes will affect the internals of this
+      FlagValues object.
+    """
+    if not isinstance(module, str):
+      module = module.__name__
+
+    # Any flag is a key flag for the module that defined it.  NOTE:
+    # key_flags is a fresh list: we can update it without affecting the
+    # internals of this FlagValues object.
+    key_flags = self._GetFlagsDefinedByModule(module)
+
+    # Take into account flags explicitly declared as key for a module.
+    for flag in self.KeyFlagsByModuleDict().get(module, []):
+      if flag not in key_flags:
+        key_flags.append(flag)
+    return key_flags
+
+  def FindModuleDefiningFlag(self, flagname, default=None):
+    """Return the name of the module defining this flag, or default.
+
+    Args:
+      flagname: Name of the flag to lookup.
+      default: Value to return if flagname is not defined. Defaults
+          to None.
+
+    Returns:
+      The name of the module which registered the flag with this name.
+      If no such module exists (i.e. no flag with this name exists),
+      we return default.
+    """
+    for module, flags in self.FlagsByModuleDict().iteritems():
+      for flag in flags:
+        if flag.name == flagname or flag.short_name == flagname:
+          return module
+    return default
+
+  def FindModuleIdDefiningFlag(self, flagname, default=None):
+    """Return the ID of the module defining this flag, or default.
+
+    Args:
+      flagname: Name of the flag to lookup.
+      default: Value to return if flagname is not defined. Defaults
+          to None.
+
+    Returns:
+      The ID of the module which registered the flag with this name.
+      If no such module exists (i.e. no flag with this name exists),
+      we return default.
+    """
+    for module_id, flags in self.FlagsByModuleIdDict().iteritems():
+      for flag in flags:
+        if flag.name == flagname or flag.short_name == flagname:
+          return module_id
+    return default
+
+  def AppendFlagValues(self, flag_values):
+    """Appends flags registered in another FlagValues instance.
+
+    Args:
+      flag_values: registry to copy from
+    """
+    for flag_name, flag in flag_values.FlagDict().iteritems():
+      # Each flag with a short name appears here twice (once under its
+      # normal name, and again with its short name).  To prevent
+      # problems (DuplicateFlagError) with double flag registration, we
+      # perform a check to make sure that the entry we're looking at is
+      # for its normal name.
+      if flag_name == flag.name:
+        try:
+          self[flag_name] = flag
+        except DuplicateFlagError:
+          raise DuplicateFlagError(flag_name, self,
+                                   other_flag_values=flag_values)
+
+  def RemoveFlagValues(self, flag_values):
+    """Remove flags that were previously appended from another FlagValues.
+
+    Args:
+      flag_values: registry containing flags to remove.
+    """
+    for flag_name in flag_values.FlagDict():
+      self.__delattr__(flag_name)
+
+  def __setitem__(self, name, flag):
+    """Registers a new flag variable."""
+    fl = self.FlagDict()
+    if not isinstance(flag, Flag):
+      raise IllegalFlagValue(flag)
+    if not isinstance(name, type("")):
+      raise FlagsError("Flag name must be a string")
+    if len(name) == 0:
+      raise FlagsError("Flag name cannot be empty")
+    # If running under pychecker, duplicate keys are likely to be
+    # defined.  Disable check for duplicate keys when pycheck'ing.
+    if (name in fl and not flag.allow_override and
+        not fl[name].allow_override and not _RUNNING_PYCHECKER):
+      module, module_name = _GetCallingModuleObjectAndName()
+      if (self.FindModuleDefiningFlag(name) == module_name and
+          id(module) != self.FindModuleIdDefiningFlag(name)):
+        # If the flag has already been defined by a module with the same name,
+        # but a different ID, we can stop here because it indicates that the
+        # module is simply being imported a subsequent time.
+        return
+      raise DuplicateFlagError(name, self)
+    short_name = flag.short_name
+    if short_name is not None:
+      if (short_name in fl and not flag.allow_override and
+          not fl[short_name].allow_override and not _RUNNING_PYCHECKER):
+        raise DuplicateFlagError(short_name, self)
+      fl[short_name] = flag
+    fl[name] = flag
+    global _exported_flags
+    _exported_flags[name] = flag
+
+  def __getitem__(self, name):
+    """Retrieves the Flag object for the flag --name."""
+    return self.FlagDict()[name]
+
+  def __getattr__(self, name):
+    """Retrieves the 'value' attribute of the flag --name."""
+    fl = self.FlagDict()
+    if name not in fl:
+      raise AttributeError(name)
+    return fl[name].value
+
+  def __setattr__(self, name, value):
+    """Sets the 'value' attribute of the flag --name."""
+    fl = self.FlagDict()
+    fl[name].value = value
+    self._AssertValidators(fl[name].validators)
+    return value
+
+  def _AssertAllValidators(self):
+    all_validators = set()
+    for flag in self.FlagDict().itervalues():
+      for validator in flag.validators:
+        all_validators.add(validator)
+    self._AssertValidators(all_validators)
+
+  def _AssertValidators(self, validators):
+    """Assert if all validators in the list are satisfied.
+
+    Asserts validators in the order they were created.
+    Args:
+      validators: Iterable(gflags_validators.Validator), validators to be
+        verified
+    Raises:
+      AttributeError: if validators work with a non-existing flag.
+      IllegalFlagValue: if validation fails for at least one validator
+    """
+    for validator in sorted(
+        validators, key=lambda validator: validator.insertion_index):
+      try:
+        validator.Verify(self)
+      except gflags_validators.Error, e:
+        message = validator.PrintFlagsWithValues(self)
+        raise IllegalFlagValue('%s: %s' % (message, str(e)))
+
+  def _FlagIsRegistered(self, flag_obj):
+    """Checks whether a Flag object is registered under some name.
+
+    Note: this is nontrivial: in addition to its normal name, a flag
+    may have a short name too.  In self.FlagDict(), both the normal and
+    the short name are mapped to the same flag object.  E.g., calling
+    only "del FLAGS.short_name" does not unregister the corresponding
+    Flag object (it is still registered under the longer name).
+
+    Args:
+      flag_obj: A Flag object.
+
+    Returns:
+      A boolean: True iff flag_obj is registered under some name.
+    """
+    flag_dict = self.FlagDict()
+    # Check whether flag_obj is registered under its long name.
+    name = flag_obj.name
+    if flag_dict.get(name, None) == flag_obj:
+      return True
+    # Check whether flag_obj is registered under its short name.
+    short_name = flag_obj.short_name
+    if (short_name is not None and
+        flag_dict.get(short_name, None) == flag_obj):
+      return True
+    # The flag cannot be registered under any other name, so we do not
+    # need to do a full search through the values of self.FlagDict().
+    return False
+
+  def __delattr__(self, flag_name):
+    """Deletes a previously-defined flag from a flag object.
+
+    This method makes sure we can delete a flag by using
+
+      del flag_values_object.<flag_name>
+
+    E.g.,
+
+      gflags.DEFINE_integer('foo', 1, 'Integer flag.')
+      del gflags.FLAGS.foo
+
+    Args:
+      flag_name: A string, the name of the flag to be deleted.
+
+    Raises:
+      AttributeError: When there is no registered flag named flag_name.
+    """
+    fl = self.FlagDict()
+    if flag_name not in fl:
+      raise AttributeError(flag_name)
+
+    flag_obj = fl[flag_name]
+    del fl[flag_name]
+
+    if not self._FlagIsRegistered(flag_obj):
+      # If the Flag object indicated by flag_name is no longer
+      # registered (please see the docstring of _FlagIsRegistered), then
+      # we delete the occurrences of the flag object in all our internal
+      # dictionaries.
+      self.__RemoveFlagFromDictByModule(self.FlagsByModuleDict(), flag_obj)
+      self.__RemoveFlagFromDictByModule(self.FlagsByModuleIdDict(), flag_obj)
+      self.__RemoveFlagFromDictByModule(self.KeyFlagsByModuleDict(), flag_obj)
+
+  def __RemoveFlagFromDictByModule(self, flags_by_module_dict, flag_obj):
+    """Removes a flag object from a module -> list of flags dictionary.
+
+    Args:
+      flags_by_module_dict: A dictionary that maps module names to lists of
+        flags.
+      flag_obj: A flag object.
+    """
+    for unused_module, flags_in_module in flags_by_module_dict.iteritems():
+      # while (as opposed to if) takes care of multiple occurrences of a
+      # flag in the list for the same module.
+      while flag_obj in flags_in_module:
+        flags_in_module.remove(flag_obj)
+
+  def SetDefault(self, name, value):
+    """Changes the default value of the named flag object."""
+    fl = self.FlagDict()
+    if name not in fl:
+      raise AttributeError(name)
+    fl[name].SetDefault(value)
+    self._AssertValidators(fl[name].validators)
+
+  def __contains__(self, name):
+    """Returns True if name is a value (flag) in the dict."""
+    return name in self.FlagDict()
+
+  has_key = __contains__  # a synonym for __contains__()
+
+  def __iter__(self):
+    return iter(self.FlagDict())
+
+  def __call__(self, argv):
+    """Parses flags from argv; stores parsed flags into this FlagValues object.
+
+    All unparsed arguments are returned.  Flags are parsed using the GNU
+    Program Argument Syntax Conventions, using getopt:
+
+    http://www.gnu.org/software/libc/manual/html_mono/libc.html#Getopt
+
+    Args:
+       argv: argument list. Can be of any type that may be converted to a list.
+
+    Returns:
+       The list of arguments not parsed as options, including argv[0]
+
+    Raises:
+       FlagsError: on any parsing error
+    """
+    # Support any sequence type that can be converted to a list
+    argv = list(argv)
+
+    shortopts = ""
+    longopts = []
+
+    fl = self.FlagDict()
+
+    # This pre-parses the argv list for --flagfile=<> options.
+    argv = argv[:1] + self.ReadFlagsFromFiles(argv[1:], force_gnu=False)
+
+    # Correct the argv to support the google style of passing boolean
+    # parameters.  Boolean parameters may be passed by using --mybool,
+    # --nomybool, --mybool=(true|false|1|0).  getopt does not support
+    # having options that may or may not have a parameter.  We replace
+    # instances of the short form --mybool and --nomybool with their
+    # full forms: --mybool=(true|false).
+    original_argv = list(argv)  # list() makes a copy
+    shortest_matches = None
+    for name, flag in fl.items():
+      if not flag.boolean:
+        continue
+      if shortest_matches is None:
+        # Determine the smallest allowable prefix for all flag names
+        shortest_matches = self.ShortestUniquePrefixes(fl)
+      no_name = 'no' + name
+      prefix = shortest_matches[name]
+      no_prefix = shortest_matches[no_name]
+
+      # Replace all occurrences of this boolean with extended forms
+      for arg_idx in range(1, len(argv)):
+        arg = argv[arg_idx]
+        if arg.find('=') >= 0: continue
+        if arg.startswith('--'+prefix) and ('--'+name).startswith(arg):
+          argv[arg_idx] = ('--%s=true' % name)
+        elif arg.startswith('--'+no_prefix) and ('--'+no_name).startswith(arg):
+          argv[arg_idx] = ('--%s=false' % name)
+
+    # Loop over all of the flags, building up the lists of short options
+    # and long options that will be passed to getopt.  Short options are
+    # specified as a string of letters, each letter followed by a colon
+    # if it takes an argument.  Long options are stored in an array of
+    # strings.  Each string ends with an '=' if it takes an argument.
+    for name, flag in fl.items():
+      longopts.append(name + "=")
+      if len(name) == 1:  # one-letter option: allow short flag type also
+        shortopts += name
+        if not flag.boolean:
+          shortopts += ":"
+
+    longopts.append('undefok=')
+    undefok_flags = []
+
+    # In case --undefok is specified, loop to pick up unrecognized
+    # options one by one.
+    unrecognized_opts = []
+    args = argv[1:]
+    while True:
+      try:
+        if self.__dict__['__use_gnu_getopt']:
+          optlist, unparsed_args = getopt.gnu_getopt(args, shortopts, longopts)
+        else:
+          optlist, unparsed_args = getopt.getopt(args, shortopts, longopts)
+        break
+      except getopt.GetoptError, e:
+        if not e.opt or e.opt in fl:
+          # Not an unrecognized option, re-raise the exception as a FlagsError
+          raise FlagsError(e)
+        # Remove offender from args and try again
+        for arg_index in range(len(args)):
+          if ((args[arg_index] == '--' + e.opt) or
+              (args[arg_index] == '-' + e.opt) or
+              (args[arg_index].startswith('--' + e.opt + '='))):
+            unrecognized_opts.append((e.opt, args[arg_index]))
+            args = args[0:arg_index] + args[arg_index+1:]
+            break
+        else:
+          # We should have found the option, so we don't expect to get
+          # here.  We could assert, but raising the original exception
+          # might work better.
+          raise FlagsError(e)
+
+    for name, arg in optlist:
+      if name == '--undefok':
+        flag_names = arg.split(',')
+        undefok_flags.extend(flag_names)
+        # For boolean flags, if --undefok=boolflag is specified, then we should
+        # also accept --noboolflag, in addition to --boolflag.
+        # Since we don't know the type of the undefok'd flag, this will affect
+        # non-boolean flags as well.
+        # NOTE: You shouldn't use --undefok=noboolflag, because then we will
+        # accept --nonoboolflag here.  We are choosing not to do the conversion
+        # from noboolflag -> boolflag because of the ambiguity that flag names
+        # can start with 'no'.
+        undefok_flags.extend('no' + name for name in flag_names)
+        continue
+      if name.startswith('--'):
+        # long option
+        name = name[2:]
+        short_option = 0
+      else:
+        # short option
+        name = name[1:]
+        short_option = 1
+      if name in fl:
+        flag = fl[name]
+        if flag.boolean and short_option: arg = 1
+        flag.Parse(arg)
+
+    # If there were unrecognized options, raise an exception unless
+    # the options were named via --undefok.
+    for opt, value in unrecognized_opts:
+      if opt not in undefok_flags:
+        raise UnrecognizedFlagError(opt, value)
+
+    if unparsed_args:
+      if self.__dict__['__use_gnu_getopt']:
+        # if using gnu_getopt just return the program name + remainder of argv.
+        ret_val = argv[:1] + unparsed_args
+      else:
+        # unparsed_args becomes the first non-flag detected by getopt to
+        # the end of argv.  Because argv may have been modified above,
+        # return original_argv for this region.
+        ret_val = argv[:1] + original_argv[-len(unparsed_args):]
+    else:
+      ret_val = argv[:1]
+
+    self._AssertAllValidators()
+    return ret_val
+
+  def Reset(self):
+    """Resets the values to the point before FLAGS(argv) was called."""
+    for f in self.FlagDict().values():
+      f.Unparse()
+
+  def RegisteredFlags(self):
+    """Returns: a list of the names and short names of all registered flags."""
+    return list(self.FlagDict())
+
+  def FlagValuesDict(self):
+    """Returns: a dictionary that maps flag names to flag values."""
+    flag_values = {}
+
+    for flag_name in self.RegisteredFlags():
+      flag = self.FlagDict()[flag_name]
+      flag_values[flag_name] = flag.value
+
+    return flag_values
+
+  def __str__(self):
+    """Generates a help string for all known flags."""
+    return self.GetHelp()
+
+  def GetHelp(self, prefix=''):
+    """Generates a help string for all known flags."""
+    helplist = []
+
+    flags_by_module = self.FlagsByModuleDict()
+    if flags_by_module:
+
+      modules = sorted(flags_by_module)
+
+      # Print the help for the main module first, if possible.
+      main_module = _GetMainModule()
+      if main_module in modules:
+        modules.remove(main_module)
+        modules = [main_module] + modules
+
+      for module in modules:
+        self.__RenderOurModuleFlags(module, helplist)
+
+      self.__RenderModuleFlags('gflags',
+                               _SPECIAL_FLAGS.FlagDict().values(),
+                               helplist)
+
+    else:
+      # Just print one long list of flags.
+      self.__RenderFlagList(
+          self.FlagDict().values() + _SPECIAL_FLAGS.FlagDict().values(),
+          helplist, prefix)
+
+    return '\n'.join(helplist)
+
+  def __RenderModuleFlags(self, module, flags, output_lines, prefix=""):
+    """Generates a help string for a given module."""
+    if not isinstance(module, str):
+      module = module.__name__
+    output_lines.append('\n%s%s:' % (prefix, module))
+    self.__RenderFlagList(flags, output_lines, prefix + "  ")
+
+  def __RenderOurModuleFlags(self, module, output_lines, prefix=""):
+    """Generates a help string for a given module."""
+    flags = self._GetFlagsDefinedByModule(module)
+    if flags:
+      self.__RenderModuleFlags(module, flags, output_lines, prefix)
+
+  def __RenderOurModuleKeyFlags(self, module, output_lines, prefix=""):
+    """Generates a help string for the key flags of a given module.
+
+    Args:
+      module: A module object or a module name (a string).
+      output_lines: A list of strings.  The generated help message
+        lines will be appended to this list.
+      prefix: A string that is prepended to each generated help line.
+    """
+    key_flags = self._GetKeyFlagsForModule(module)
+    if key_flags:
+      self.__RenderModuleFlags(module, key_flags, output_lines, prefix)
+
+  def ModuleHelp(self, module):
+    """Describe the key flags of a module.
+
+    Args:
+      module: A module object or a module name (a string).
+
+    Returns:
+      string describing the key flags of a module.
+    """
+    helplist = []
+    self.__RenderOurModuleKeyFlags(module, helplist)
+    return '\n'.join(helplist)
+
+  def MainModuleHelp(self):
+    """Describe the key flags of the main module.
+
+    Returns:
+      string describing the key flags of the main module.
+    """
+    return self.ModuleHelp(_GetMainModule())
+
+  def __RenderFlagList(self, flaglist, output_lines, prefix="  "):
+    fl = self.FlagDict()
+    special_fl = _SPECIAL_FLAGS.FlagDict()
+    flaglist = [(flag.name, flag) for flag in flaglist]
+    flaglist.sort()
+    flagset = {}
+    for (name, flag) in flaglist:
+      # It's possible this flag got deleted or overridden since being
+      # registered in the per-module flaglist.  Check now against the
+      # canonical source of current flag information, the FlagDict.
+      if fl.get(name, None) != flag and special_fl.get(name, None) != flag:
+        # a different flag is using this name now
+        continue
+      # only print help once
+      if flag in flagset: continue
+      flagset[flag] = 1
+      flaghelp = ""
+      if flag.short_name: flaghelp += "-%s," % flag.short_name
+      if flag.boolean:
+        flaghelp += "--[no]%s" % flag.name + ":"
+      else:
+        flaghelp += "--%s" % flag.name + ":"
+      flaghelp += "  "
+      if flag.help:
+        flaghelp += flag.help
+      flaghelp = TextWrap(flaghelp, indent=prefix+"  ",
+                          firstline_indent=prefix)
+      if flag.default_as_str:
+        flaghelp += "\n"
+        flaghelp += TextWrap("(default: %s)" % flag.default_as_str,
+                             indent=prefix+"  ")
+      if flag.parser.syntactic_help:
+        flaghelp += "\n"
+        flaghelp += TextWrap("(%s)" % flag.parser.syntactic_help,
+                             indent=prefix+"  ")
+      output_lines.append(flaghelp)
+
+  def get(self, name, default):
+    """Returns the value of a flag (if not None) or a default value.
+
+    Args:
+      name: A string, the name of a flag.
+      default: Default value to use if the flag value is None.
+    """
+
+    value = self.__getattr__(name)
+    if value is not None:  # Can't do if not value, b/c value might be '0' or ""
+      return value
+    else:
+      return default
+
+  def ShortestUniquePrefixes(self, fl):
+    """Returns: dictionary; maps flag names to their shortest unique prefix."""
+    # Sort the list of flag names
+    sorted_flags = []
+    for name, flag in fl.items():
+      sorted_flags.append(name)
+      if flag.boolean:
+        sorted_flags.append('no%s' % name)
+    sorted_flags.sort()
+
+    # For each name in the sorted list, determine the shortest unique
+    # prefix by comparing itself to the next name and to the previous
+    # name (the latter check uses cached info from the previous loop).
+    shortest_matches = {}
+    prev_idx = 0
+    for flag_idx in range(len(sorted_flags)):
+      curr = sorted_flags[flag_idx]
+      if flag_idx == (len(sorted_flags) - 1):
+        next = None
+      else:
+        next = sorted_flags[flag_idx+1]
+        next_len = len(next)
+      for curr_idx in range(len(curr)):
+        if (next is None
+            or curr_idx >= next_len
+            or curr[curr_idx] != next[curr_idx]):
+          # curr longer than next or no more chars in common
+          shortest_matches[curr] = curr[:max(prev_idx, curr_idx) + 1]
+          prev_idx = curr_idx
+          break
+      else:
+        # curr shorter than (or equal to) next
+        shortest_matches[curr] = curr
+        prev_idx = curr_idx + 1  # next will need at least one more char
+    return shortest_matches
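+
+  # Worked example (illustrative): for two non-boolean flags named 'log_dir'
+  # and 'logtostderr', this returns
+  #   {'log_dir': 'log_', 'logtostderr': 'logt'}
+  # since four characters are needed to tell the two names apart.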
+
+  def __IsFlagFileDirective(self, flag_string):
+    """Checks whether flag_string contain a --flagfile=<foo> directive."""
+    if isinstance(flag_string, type("")):
+      if flag_string.startswith('--flagfile='):
+        return 1
+      elif flag_string == '--flagfile':
+        return 1
+      elif flag_string.startswith('-flagfile='):
+        return 1
+      elif flag_string == '-flagfile':
+        return 1
+      else:
+        return 0
+    return 0
+
+  def ExtractFilename(self, flagfile_str):
+    """Returns filename from a flagfile_str of form -[-]flagfile=filename.
+
+    The cases of --flagfile foo and -flagfile foo shouldn't be hitting
+    this function, as they are dealt with in the level above this
+    function.
+    """
+    if flagfile_str.startswith('--flagfile='):
+      return os.path.expanduser((flagfile_str[(len('--flagfile=')):]).strip())
+    elif flagfile_str.startswith('-flagfile='):
+      return os.path.expanduser((flagfile_str[(len('-flagfile=')):]).strip())
+    else:
+      raise FlagsError('Hit illegal --flagfile type: %s' % flagfile_str)
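+
+  # Example (illustrative): ExtractFilename('--flagfile=~/my.flags') returns
+  # os.path.expanduser('~/my.flags'), e.g. '/home/me/my.flags'.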
+
+  def __GetFlagFileLines(self, filename, parsed_file_list):
+    """Returns the useful (!=comments, etc) lines from a file with flags.
+
+    Args:
+      filename: A string, the name of the flag file.
+      parsed_file_list: A list of the names of the files we have
+        already read.  MUTATED BY THIS FUNCTION.
+
+    Returns:
+      List of strings. See the note below.
+
+    NOTE(springer): This function checks for a nested --flagfile=<foo>
+    tag and handles the lower file recursively. It returns a list of
+    all the lines that _could_ contain command flags. This is
+    EVERYTHING except whitespace lines and comments (lines starting
+    with '#' or '//').
+    """
+    line_list = []  # All lines from the flagfile.
+    flag_line_list = []  # Subset of lines w/o comments, blanks, flagfile= tags.
+    try:
+      file_obj = open(filename, 'r')
+    except IOError, e_msg:
+      raise CantOpenFlagFileError('ERROR:: Unable to open flagfile: %s' % e_msg)
+
+    line_list = file_obj.readlines()
+    file_obj.close()
+    parsed_file_list.append(filename)
+
+    # This is where we check each line in the file we just read.
+    for line in line_list:
+      if line.isspace():
+        pass
+      # Checks for comment (a line that starts with '#').
+      elif line.startswith('#') or line.startswith('//'):
+        pass
+      # Checks for a nested "--flagfile=<bar>" flag in the current file.
+      # If we find one, recursively parse down into that file.
+      elif self.__IsFlagFileDirective(line):
+        sub_filename = self.ExtractFilename(line)
+        # We do a little safety check for reparsing a file we've already done.
+        if not sub_filename in parsed_file_list:
+          included_flags = self.__GetFlagFileLines(sub_filename,
+                                                   parsed_file_list)
+          flag_line_list.extend(included_flags)
+        else:  # Case of hitting a circularly included file.
+          sys.stderr.write('Warning: Hit circular flagfile dependency: %s\n' %
+                           (sub_filename,))
+      else:
+        # Any line that's not a comment or a nested flagfile should get
+        # copied into 2nd position.  This leaves earlier arguments
+        # further back in the list, thus giving them higher priority.
+        flag_line_list.append(line.strip())
+    return flag_line_list
+
+  def ReadFlagsFromFiles(self, argv, force_gnu=True):
+    """Processes command line args, but also allow args to be read from file.
+
+    Args:
+      argv: A list of strings, usually sys.argv[1:], which may contain one or
+        more flagfile directives of the form --flagfile="./filename".
+        Note that the name of the program (sys.argv[0]) should be omitted.
+      force_gnu: If False, --flagfile parsing obeys normal flag semantics.
+        If True, --flagfile parsing instead follows gnu_getopt semantics.
+        *** WARNING *** force_gnu=False may become the future default!
+
+    Returns:
+      A new list which has the original list combined with what we read
+      from any flagfile(s).
+
+    References: the global gflags.FLAGS FlagValues instance.
+
+    This function should be called before the normal FLAGS(argv) call.
+    This function scans the input list for a flag that looks like:
+    --flagfile=<somefile>. Then it opens <somefile>, reads all valid key
+    and value pairs and inserts them into the input list between the
+    first item of the list and any subsequent items in the list.
+
+    Note that your application's flags are still defined the usual way
+    using gflags DEFINE_flag() type functions.
+
+    Notes (assuming we're getting a commandline of some sort as our input):
+    --> Flags from the command line argv _should_ always take precedence!
+    --> A further "--flagfile=<otherfile.cfg>" CAN be nested in a flagfile.
+        It will be processed after the parent flag file is done.
+    --> For duplicate flags, first one we hit should "win".
+    --> In a flagfile, a line beginning with # or // is a comment.
+    --> Entirely blank lines _should_ be ignored.
+    """
+    parsed_file_list = []
+    rest_of_args = argv
+    new_argv = []
+    while rest_of_args:
+      current_arg = rest_of_args[0]
+      rest_of_args = rest_of_args[1:]
+      if self.__IsFlagFileDirective(current_arg):
+        # This handles the case of -(-)flagfile foo.  In this case the
+        # next arg really is part of this one.
+        if current_arg == '--flagfile' or current_arg == '-flagfile':
+          if not rest_of_args:
+            raise IllegalFlagValue('--flagfile with no argument')
+          flag_filename = os.path.expanduser(rest_of_args[0])
+          rest_of_args = rest_of_args[1:]
+        else:
+          # This handles the case of (-)-flagfile=foo.
+          flag_filename = self.ExtractFilename(current_arg)
+        new_argv.extend(
+            self.__GetFlagFileLines(flag_filename, parsed_file_list))
+      else:
+        new_argv.append(current_arg)
+        # Stop parsing after '--', like getopt and gnu_getopt.
+        if current_arg == '--':
+          break
+        # Stop parsing after a non-flag, like getopt.
+        if not current_arg.startswith('-'):
+          if not force_gnu and not self.__dict__['__use_gnu_getopt']:
+            break
+
+    if rest_of_args:
+      new_argv.extend(rest_of_args)
+
+    return new_argv
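+
+  # Example (illustrative, with a hypothetical file 'f.cfg' containing the
+  # lines '--foo=bar' and '# a comment'):
+  #
+  #   FLAGS.ReadFlagsFromFiles(['--flagfile=f.cfg', '--baz=1'])
+  #
+  # returns ['--foo=bar', '--baz=1']: the flagfile contents replace the
+  # directive in place and the comment line is dropped.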
+
+  def FlagsIntoString(self):
+    """Returns a string with the flags assignments from this FlagValues object.
+
+    This function ignores flags whose value is None.  Each flag
+    assignment is separated by a newline.
+
+    NOTE: MUST mirror the behavior of the C++ CommandlineFlagsIntoString
+    from http://code.google.com/p/google-gflags
+    """
+    s = ''
+    for flag in self.FlagDict().values():
+      if flag.value is not None:
+        s += flag.Serialize() + '\n'
+    return s
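+
+  # Example (illustrative): with a string flag --name set to 'x' and a
+  # boolean flag --debug set to False, the returned string contains the
+  # lines '--name=x' and '--nodebug', ready to be written to a flagfile.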
+
+  def AppendFlagsIntoFile(self, filename):
+    """Appends all flags assignments from this FlagInfo object to a file.
+
+    Output will be in the format of a flagfile.
+
+    NOTE: MUST mirror the behavior of the C++ AppendFlagsIntoFile
+    from http://code.google.com/p/google-gflags
+    """
+    out_file = open(filename, 'a')
+    out_file.write(self.FlagsIntoString())
+    out_file.close()
+
+  def WriteHelpInXMLFormat(self, outfile=None):
+    """Outputs flag documentation in XML format.
+
+    NOTE: We use element names that are consistent with those used by
+    the C++ command-line flag library, from
+    http://code.google.com/p/google-gflags
+    We also use a few new elements (e.g., <key>), but we do not
+    interfere / overlap with existing XML elements used by the C++
+    library.  Please maintain this consistency.
+
+    Args:
+      outfile: File object we write to.  Default None means sys.stdout.
+    """
+    outfile = outfile or sys.stdout
+
+    outfile.write('<?xml version=\"1.0\"?>\n')
+    outfile.write('<AllFlags>\n')
+    indent = '  '
+    _WriteSimpleXMLElement(outfile, 'program', os.path.basename(sys.argv[0]),
+                           indent)
+
+    usage_doc = sys.modules['__main__'].__doc__
+    if not usage_doc:
+      usage_doc = '\nUSAGE: %s [flags]\n' % sys.argv[0]
+    else:
+      usage_doc = usage_doc.replace('%s', sys.argv[0])
+    _WriteSimpleXMLElement(outfile, 'usage', usage_doc, indent)
+
+    # Get list of key flags for the main module.
+    key_flags = self._GetKeyFlagsForModule(_GetMainModule())
+
+    # Sort flags by declaring module name and next by flag name.
+    flags_by_module = self.FlagsByModuleDict()
+    all_module_names = list(flags_by_module.keys())
+    all_module_names.sort()
+    for module_name in all_module_names:
+      flag_list = [(f.name, f) for f in flags_by_module[module_name]]
+      flag_list.sort()
+      for unused_flag_name, flag in flag_list:
+        is_key = flag in key_flags
+        flag.WriteInfoInXMLFormat(outfile, module_name,
+                                  is_key=is_key, indent=indent)
+
+    outfile.write('</AllFlags>\n')
+    outfile.flush()
+
+  def AddValidator(self, validator):
+    """Register new flags validator to be checked.
+
+    Args:
+      validator: gflags_validators.Validator
+    Raises:
+      AttributeError: if validators work with a non-existing flag.
+    """
+    for flag_name in validator.GetFlagsNames():
+      flag = self.FlagDict()[flag_name]
+      flag.validators.append(validator)
+
+# end of FlagValues definition
+
+
+# The global FlagValues instance
+FLAGS = FlagValues()
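+
+
+# Typical usage (an illustrative sketch; the DEFINE_* helpers are defined
+# further down in this module):
+#
+#   gflags.DEFINE_string('name', 'world', 'Who to greet.')
+#   remaining_argv = gflags.FLAGS(sys.argv)  # parse flags, keep the rest
+#   print gflags.FLAGS.name                  # 'world' unless --name was given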
+
+
+def _StrOrUnicode(value):
+  """Converts value to a python string or, if necessary, unicode-string."""
+  try:
+    return str(value)
+  except UnicodeEncodeError:
+    return unicode(value)
+
+
+def _MakeXMLSafe(s):
+  """Escapes <, >, and & from s, and removes XML 1.0-illegal chars."""
+  s = cgi.escape(s)  # Escape <, >, and &
+  # Remove characters that cannot appear in an XML 1.0 document
+  # (http://www.w3.org/TR/REC-xml/#charsets).
+  #
+  # NOTE: if there are problems with current solution, one may move to
+  # XML 1.1, which allows such chars, if they're entity-escaped (&#xHH;).
+  s = re.sub(r'[\x00-\x08\x0b\x0c\x0e-\x1f]', '', s)
+  # Convert non-ascii characters to entities.  Note: requires python >=2.3
+  s = s.encode('ascii', 'xmlcharrefreplace')   # e.g. u'\u0388' -> '&#904;'
+  return s
+
+
+def _WriteSimpleXMLElement(outfile, name, value, indent):
+  """Writes a simple XML element.
+
+  Args:
+    outfile: File object we write the XML element to.
+    name: A string, the name of XML element.
+    value: A Python object, whose string representation will be used
+      as the value of the XML element.
+    indent: A string, prepended to each line of generated output.
+  """
+  value_str = _StrOrUnicode(value)
+  if isinstance(value, bool):
+    # Display boolean values as the C++ flag library does: no caps.
+    value_str = value_str.lower()
+  safe_value_str = _MakeXMLSafe(value_str)
+  outfile.write('%s<%s>%s</%s>\n' % (indent, name, safe_value_str, name))
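+
+
+# Example (illustrative): _WriteSimpleXMLElement(sys.stdout, 'name',
+# 'verbose', '  ') writes the single line '  <name>verbose</name>'.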
+
+
+class Flag:
+  """Information about a command-line flag.
+
+  'Flag' objects define the following fields:
+    .name  - the name for this flag
+    .default - the default value for this flag
+    .default_as_str - default value as repr'd string, e.g., "'true'" (or None)
+    .value  - the most recent parsed value of this flag; set by Parse()
+    .help  - a help string or None if no help is available
+    .short_name  - the single letter alias for this flag (or None)
+    .boolean  - if 'true', this flag does not accept arguments
+    .present  - true if this flag was parsed from command line flags.
+    .parser  - an ArgumentParser object
+    .serializer - an ArgumentSerializer object
+    .allow_override - the flag may be redefined without raising an error
+
+  The only public method of a 'Flag' object is Parse(), but it is
+  typically only called by a 'FlagValues' object.  The Parse() method is
+  a thin wrapper around the 'ArgumentParser' Parse() method.  The parsed
+  value is saved in .value, and the .present attribute is updated.  If
+  this flag was already present, a FlagsError is raised.
+
+  Parse() is also called during __init__ to parse the default value and
+  initialize the .value attribute.  This enables other python modules to
+  safely use flags even if the __main__ module neglects to parse the
+  command line arguments.  The .present attribute is cleared after
+  __init__ parsing.  If the default value is set to None, then the
+  __init__ parsing step is skipped and the .value attribute is
+  initialized to None.
+
+  Note: The default value is also presented to the user in the help
+  string, so it is important that it be a legal value for this flag.
+  """
+
+  def __init__(self, parser, serializer, name, default, help_string,
+               short_name=None, boolean=0, allow_override=0):
+    self.name = name
+
+    if not help_string:
+      help_string = '(no help available)'
+
+    self.help = help_string
+    self.short_name = short_name
+    self.boolean = boolean
+    self.present = 0
+    self.parser = parser
+    self.serializer = serializer
+    self.allow_override = allow_override
+    self.value = None
+    self.validators = []
+
+    self.SetDefault(default)
+
+  def __hash__(self):
+    return hash(id(self))
+
+  def __eq__(self, other):
+    return self is other
+
+  def __lt__(self, other):
+    if isinstance(other, Flag):
+      return id(self) < id(other)
+    return NotImplemented
+
+  def __GetParsedValueAsString(self, value):
+    if value is None:
+      return None
+    if self.serializer:
+      return repr(self.serializer.Serialize(value))
+    if self.boolean:
+      if value:
+        return repr('true')
+      else:
+        return repr('false')
+    return repr(_StrOrUnicode(value))
+
+  def Parse(self, argument):
+    try:
+      self.value = self.parser.Parse(argument)
+    except ValueError, e:  # recast ValueError as IllegalFlagValue
+      raise IllegalFlagValue("flag --%s=%s: %s" % (self.name, argument, e))
+    self.present += 1
+
+  def Unparse(self):
+    if self.default is None:
+      self.value = None
+    else:
+      self.Parse(self.default)
+    self.present = 0
+
+  def Serialize(self):
+    if self.value is None:
+      return ''
+    if self.boolean:
+      if self.value:
+        return "--%s" % self.name
+      else:
+        return "--no%s" % self.name
+    else:
+      if not self.serializer:
+        raise FlagsError("Serializer not present for flag %s" % self.name)
+      return "--%s=%s" % (self.name, self.serializer.Serialize(self.value))
+
+  def SetDefault(self, value):
+    """Changes the default value (and current value too) for this Flag."""
+    # We can't allow a None override because it may end up not being
+    # passed to C++ code when we're overriding C++ flags.  So we
+    # cowardly bail out until someone fixes the semantics of trying to
+    # pass None to a C++ flag.  See swig_flags.Init() for details on
+    # this behavior.
+    # TODO(olexiy): Users can directly call this method, bypassing all flags
+    # validators (we don't have FlagValues here, so we can not check
+    # validators).
+    # The simplest solution I see is to make this method private.
+    # Another approach would be to store reference to the corresponding
+    # FlagValues with each flag, but this seems to be an overkill.
+    if value is None and self.allow_override:
+      raise DuplicateFlagCannotPropagateNoneToSwig(self.name)
+
+    self.default = value
+    self.Unparse()
+    self.default_as_str = self.__GetParsedValueAsString(self.value)
+
+  def Type(self):
+    """Returns: a string that describes the type of this Flag."""
+    # NOTE: we use strings, and not the types.*Type constants because
+    # our flags can have more exotic types, e.g., 'comma separated list
+    # of strings', 'whitespace separated list of strings', etc.
+    return self.parser.Type()
+
+  def WriteInfoInXMLFormat(self, outfile, module_name, is_key=False, indent=''):
+    """Writes common info about this flag, in XML format.
+
+    This is information that is relevant to all flags (e.g., name,
+    meaning, etc.).  If you defined a flag that has some other pieces of
+    info, then please override _WriteCustomInfoInXMLFormat.
+
+    Please do NOT override this method.
+
+    Args:
+      outfile: File object we write to.
+      module_name: A string, the name of the module that defines this flag.
+      is_key: A boolean, True iff this flag is key for main module.
+      indent: A string that is prepended to each generated line.
+    """
+    outfile.write(indent + '<flag>\n')
+    inner_indent = indent + '  '
+    if is_key:
+      _WriteSimpleXMLElement(outfile, 'key', 'yes', inner_indent)
+    _WriteSimpleXMLElement(outfile, 'file', module_name, inner_indent)
+    # Print flag features that are relevant for all flags.
+    _WriteSimpleXMLElement(outfile, 'name', self.name, inner_indent)
+    if self.short_name:
+      _WriteSimpleXMLElement(outfile, 'short_name', self.short_name,
+                             inner_indent)
+    if self.help:
+      _WriteSimpleXMLElement(outfile, 'meaning', self.help, inner_indent)
+    # The default flag value can either be represented as a string like on the
+    # command line, or as a Python object.  We serialize this value in the
+    # latter case in order to remain consistent.
+    if self.serializer and not isinstance(self.default, str):
+      default_serialized = self.serializer.Serialize(self.default)
+    else:
+      default_serialized = self.default
+    _WriteSimpleXMLElement(outfile, 'default', default_serialized, inner_indent)
+    _WriteSimpleXMLElement(outfile, 'current', self.value, inner_indent)
+    _WriteSimpleXMLElement(outfile, 'type', self.Type(), inner_indent)
+    # Print extra flag features this flag may have.
+    self._WriteCustomInfoInXMLFormat(outfile, inner_indent)
+    outfile.write(indent + '</flag>\n')
+
+  def _WriteCustomInfoInXMLFormat(self, outfile, indent):
+    """Writes extra info about this flag, in XML format.
+
+    "Extra" means "not already printed by WriteInfoInXMLFormat above."
+
+    Args:
+      outfile: File object we write to.
+      indent: A string that is prepended to each generated line.
+    """
+    # Usually, the parser knows the extra details about the flag, so
+    # we just forward the call to it.
+    self.parser.WriteCustomInfoInXMLFormat(outfile, indent)
+# End of Flag definition
+
+
+class _ArgumentParserCache(type):
+  """Metaclass used to cache and share argument parsers among flags."""
+
+  _instances = {}
+
+  def __call__(mcs, *args, **kwargs):
+    """Returns an instance of the argument parser cls.
+
+    This method overrides behavior of the __new__ methods in
+    all subclasses of ArgumentParser (inclusive). If an instance
+    for mcs with the same set of arguments exists, this instance is
+    returned; otherwise, a new instance is created.
+
+    If any keyword arguments are defined, or the values in args
+    are not hashable, this method always returns a new instance of
+    cls.
+
+    Args:
+      args: Positional initializer arguments.
+      kwargs: Initializer keyword arguments.
+
+    Returns:
+      An instance of cls, shared or new.
+    """
+    if kwargs:
+      return type.__call__(mcs, *args, **kwargs)
+    else:
+      instances = mcs._instances
+      key = (mcs,) + tuple(args)
+      try:
+        return instances[key]
+      except KeyError:
+        # No cache entry for key exists, create a new one.
+        return instances.setdefault(key, type.__call__(mcs, *args))
+      except TypeError:
+        # An object in args cannot be hashed, always return
+        # a new instance.
+        return type.__call__(mcs, *args)
+
+
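+# Illustrative sketch of the caching behavior above (uses FloatParser,
+# defined later in this module): constructing a parser twice with the same
+# hashable positional arguments yields the same shared instance.
+#
+#   assert FloatParser(0, 10) is FloatParser(0, 10)
+#   assert FloatParser(0, 10) is not FloatParser(0, 99)
+
+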
+class ArgumentParser(object):
+  """Base class used to parse and convert arguments.
+
+  The Parse() method checks to make sure that the string argument is a
+  legal value and converts it to a native type.  If the value cannot be
+  converted, it should raise a 'ValueError' exception with a human
+  readable explanation of why the value is illegal.
+
+  Subclasses should also define a syntactic_help string which may be
+  presented to the user to describe the form of the legal values.
+
+  Argument parser classes must be stateless, since instances are cached
+  and shared between flags. Initializer arguments are allowed, but all
+  member variables must be derived from initializer arguments only.
+  """
+  __metaclass__ = _ArgumentParserCache
+
+  syntactic_help = ""
+
+  def Parse(self, argument):
+    """Default implementation: always returns its argument unmodified."""
+    return argument
+
+  def Type(self):
+    return 'string'
+
+  def WriteCustomInfoInXMLFormat(self, outfile, indent):
+    pass
+
+
+class ArgumentSerializer:
+  """Base class for generating string representations of a flag value."""
+
+  def Serialize(self, value):
+    return _StrOrUnicode(value)
+
+
+class ListSerializer(ArgumentSerializer):
+
+  def __init__(self, list_sep):
+    self.list_sep = list_sep
+
+  def Serialize(self, value):
+    return self.list_sep.join([_StrOrUnicode(x) for x in value])
+
+
+# Flags validators
+
+
+def RegisterValidator(flag_name,
+                      checker,
+                      message='Flag validation failed',
+                      flag_values=FLAGS):
+  """Adds a constraint, which will be enforced during program execution.
+
+  The constraint is validated when flags are initially parsed, and after each
+  change of the corresponding flag's value.
+
+  Args:
+    flag_name: string, name of the flag to be checked.
+    checker: method to validate the flag.
+      input  - value of the corresponding flag (string, boolean, etc.);
+        this value will be passed to checker by the library.  See file's
+        docstring for examples.
+      output - Boolean.
+        Must return True if validator constraint is satisfied.
+        If constraint is not satisfied, it should either return False or
+          raise gflags_validators.Error(desired_error_message).
+    message: error text to be shown to the user if checker returns False.
+      If checker raises gflags_validators.Error, message from the raised
+        Error will be shown.
+    flag_values: FlagValues
+  Raises:
+    AttributeError: if flag_name is not registered as a valid flag name.
+  """
+  flag_values.AddValidator(gflags_validators.SimpleValidator(flag_name,
+                                                            checker,
+                                                            message))
+
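+# Illustrative usage sketch (hypothetical flag name 'port'):
+#
+#   DEFINE_integer('port', 8080, 'Port to listen on.')
+#   RegisterValidator('port',
+#                     lambda value: 1 <= value <= 65535,
+#                     message='--port must be in the range [1, 65535].')
+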
+
+def MarkFlagAsRequired(flag_name, flag_values=FLAGS):
+  """Ensure that flag is not None during program execution.
+
+  Registers a flag validator, which will follow usual validator
+  rules.
+
+  Args:
+    flag_name: string, name of the flag
+    flag_values: FlagValues
+  Raises:
+    AttributeError: if flag_name is not registered as a valid flag name.
+  """
+  RegisterValidator(flag_name,
+                    lambda value: value is not None,
+                    message='Flag --%s must be specified.' % flag_name,
+                    flag_values=flag_values)
+
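+# Illustrative sketch (hypothetical flag name): fail fast unless --project
+# is given on the command line.
+#
+#   DEFINE_string('project', None, 'Project to operate on.')
+#   MarkFlagAsRequired('project')
+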
+
+def _RegisterBoundsValidatorIfNeeded(parser, name, flag_values):
+  """Enforce lower and upper bounds for numeric flags.
+
+  Args:
+    parser: NumericParser (either FloatParser or IntegerParser). Provides lower
+      and upper bounds, and help text to display.
+    name: string, name of the flag
+    flag_values: FlagValues
+  """
+  if parser.lower_bound is not None or parser.upper_bound is not None:
+
+    def Checker(value):
+      if value is not None and parser.IsOutsideBounds(value):
+        message = '%s is not %s' % (value, parser.syntactic_help)
+        raise gflags_validators.Error(message)
+      return True
+
+    RegisterValidator(name,
+                      Checker,
+                      flag_values=flag_values)
+
+
+# The DEFINE functions are explained in more detail in the module doc string.
+
+
+def DEFINE(parser, name, default, help, flag_values=FLAGS, serializer=None,
+           **args):
+  """Registers a generic Flag object.
+
+  NOTE: in the docstrings of all DEFINE* functions, "registers" is short
+  for "creates a new flag and registers it".
+
+  Auxiliary function: clients should use the specialized DEFINE_<type>
+  function instead.
+
+  Args:
+    parser: ArgumentParser that is used to parse the flag arguments.
+    name: A string, the flag name.
+    default: The default value of the flag.
+    help: A help string.
+    flag_values: FlagValues object the flag will be registered with.
+    serializer: ArgumentSerializer that serializes the flag value.
+    args: Dictionary with extra keyword args that are passed to the
+      Flag __init__.
+  """
+  DEFINE_flag(Flag(parser, serializer, name, default, help, **args),
+              flag_values)
+
+
+def DEFINE_flag(flag, flag_values=FLAGS):
+  """Registers a 'Flag' object with a 'FlagValues' object.
+
+  By default, the global FLAGS 'FlagValue' object is used.
+
+  Typical users will use one of the more specialized DEFINE_xxx
+  functions, such as DEFINE_string or DEFINE_integer.  But developers
+  who need to create Flag objects themselves should use this function
+  to register their flags.
+  """
+  # copying the reference to flag_values prevents pychecker warnings
+  fv = flag_values
+  fv[flag.name] = flag
+  # Tell flag_values who's defining the flag.
+  if isinstance(flag_values, FlagValues):
+    # Regarding the above isinstance test: some users pass funny
+    # values of flag_values (e.g., {}) in order to avoid the flag
+    # registration (in the past, there used to be a flag_values ==
+    # FLAGS test here) and redefine flags with the same name (e.g.,
+    # debug).  To avoid breaking their code, we perform the
+    # registration only if flag_values is a real FlagValues object.
+    module, module_name = _GetCallingModuleObjectAndName()
+    flag_values._RegisterFlagByModule(module_name, flag)
+    flag_values._RegisterFlagByModuleId(id(module), flag)
+
+
+def _InternalDeclareKeyFlags(flag_names,
+                             flag_values=FLAGS, key_flag_values=None):
+  """Declares a flag as key for the calling module.
+
+  Internal function.  User code should call DECLARE_key_flag or
+  ADOPT_module_key_flags instead.
+
+  Args:
+    flag_names: A list of strings that are names of already-registered
+      Flag objects.
+    flag_values: A FlagValues object that the flags listed in
+      flag_names have registered with (the value of the flag_values
+      argument from the DEFINE_* calls that defined those flags).
+      This should almost never need to be overridden.
+    key_flag_values: A FlagValues object that (among possibly many
+      other things) keeps track of the key flags for each module.
+      Default None means "same as flag_values".  This should almost
+      never need to be overridden.
+
+  Raises:
+    UnrecognizedFlagError: when we refer to a flag that was not
+      defined yet.
+  """
+  key_flag_values = key_flag_values or flag_values
+
+  module = _GetCallingModule()
+
+  for flag_name in flag_names:
+    if flag_name not in flag_values:
+      raise UnrecognizedFlagError(flag_name)
+    flag = flag_values.FlagDict()[flag_name]
+    key_flag_values._RegisterKeyFlagForModule(module, flag)
+
+
+def DECLARE_key_flag(flag_name, flag_values=FLAGS):
+  """Declares one flag as key to the current module.
+
+  Key flags are flags that are deemed really important for a module.
+  They are important when listing help messages; e.g., if the
+  --helpshort command-line flag is used, then only the key flags of the
+  main module are listed (instead of all flags, as in the case of
+  --help).
+
+  Sample usage:
+
+    gflags.DECLARE_key_flag('flag_1')
+
+  Args:
+    flag_name: A string, the name of an already declared flag.
+      (Redeclaring flags as key, including flags implicitly key
+      because they were declared in this module, is a no-op.)
+    flag_values: A FlagValues object.  This should almost never
+      need to be overridden.
+  """
+  if flag_name in _SPECIAL_FLAGS:
+    # Take care of the special flags, e.g., --flagfile, --undefok.
+    # These flags are defined in _SPECIAL_FLAGS, and are treated
+    # specially during flag parsing, taking precedence over the
+    # user-defined flags.
+    _InternalDeclareKeyFlags([flag_name],
+                             flag_values=_SPECIAL_FLAGS,
+                             key_flag_values=flag_values)
+    return
+  _InternalDeclareKeyFlags([flag_name], flag_values=flag_values)
+
+
+def ADOPT_module_key_flags(module, flag_values=FLAGS):
+  """Declares that all flags key to a module are key to the current module.
+
+  Args:
+    module: A module object.
+    flag_values: A FlagValues object.  This should almost never need
+      to be overridden.
+
+  Raises:
+    FlagsError: When given an argument that is a module name (a
+    string), instead of a module object.
+  """
+  # NOTE(salcianu): an even better test would be if not
+  # isinstance(module, types.ModuleType) but I didn't want to import
+  # types for such a tiny use.
+  if isinstance(module, str):
+    raise FlagsError('Received module name %s; expected a module object.'
+                     % module)
+  _InternalDeclareKeyFlags(
+      [f.name for f in flag_values._GetKeyFlagsForModule(module.__name__)],
+      flag_values=flag_values)
+  # If module is this flag module, take _SPECIAL_FLAGS into account.
+  if module == _GetThisModuleObjectAndName()[0]:
+    _InternalDeclareKeyFlags(
+        # As we associate flags with _GetCallingModuleObjectAndName(), the
+        # special flags defined in this module are incorrectly registered with
+        # a different module.  So, we can't use _GetKeyFlagsForModule.
+        # Instead, we take all flags from _SPECIAL_FLAGS (a private
+        # FlagValues, where no other module should register flags).
+        [f.name for f in _SPECIAL_FLAGS.FlagDict().values()],
+        flag_values=_SPECIAL_FLAGS,
+        key_flag_values=flag_values)
+
+
+#
+# STRING FLAGS
+#
+
+
+def DEFINE_string(name, default, help, flag_values=FLAGS, **args):
+  """Registers a flag whose value can be any string."""
+  parser = ArgumentParser()
+  serializer = ArgumentSerializer()
+  DEFINE(parser, name, default, help, flag_values, serializer, **args)
+
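+# Illustrative sketch (hypothetical flag): once FLAGS(sys.argv) has parsed
+# the command line, the value is available as an attribute of FLAGS.
+#
+#   DEFINE_string('greeting', 'hello', 'Greeting to print.')
+#   ...
+#   print FLAGS.greeting
+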
+
+#
+# BOOLEAN FLAGS
+#
+
+
+class BooleanParser(ArgumentParser):
+  """Parser of boolean values."""
+
+  def Convert(self, argument):
+    """Converts the argument to a boolean; raise ValueError on errors."""
+    if type(argument) == str:
+      if argument.lower() in ['true', 't', '1']:
+        return True
+      elif argument.lower() in ['false', 'f', '0']:
+        return False
+
+    bool_argument = bool(argument)
+    if argument == bool_argument:
+      # The argument is a valid boolean (True, False, 0, or 1), and not just
+      # something that always converts to bool (list, string, int, etc.).
+      return bool_argument
+
+    raise ValueError('Non-boolean argument to boolean flag', argument)
+
+  def Parse(self, argument):
+    val = self.Convert(argument)
+    return val
+
+  def Type(self):
+    return 'bool'
+
+
+class BooleanFlag(Flag):
+  """Basic boolean flag.
+
+  Boolean flags do not take any arguments, and their value is either
+  True (1) or False (0).  The false value is specified on the command
+  line by prepending the word 'no' to either the long or the short flag
+  name.
+
+  For example, if a Boolean flag was created whose long name was
+  'update' and whose short name was 'x', then this flag could be
+  explicitly unset through either --noupdate or --nox.
+  """
+
+  def __init__(self, name, default, help, short_name=None, **args):
+    p = BooleanParser()
+    Flag.__init__(self, p, None, name, default, help, short_name, 1, **args)
+    if not self.help: self.help = "a boolean value"
+
+
+def DEFINE_boolean(name, default, help, flag_values=FLAGS, **args):
+  """Registers a boolean flag.
+
+  Such a boolean flag does not take an argument.  If a user wants to
+  specify a false value explicitly, the long option beginning with 'no'
+  must be used: i.e. --noflag
+
+  This flag will have a value of None, True or False.  None is possible
+  if default=None and the user does not specify the flag on the command
+  line.
+  """
+  DEFINE_flag(BooleanFlag(name, default, help, **args), flag_values)
+
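+# Illustrative sketch (hypothetical flag): DEFINE_boolean('verbose', False,
+# 'Be chatty.') gives --verbose to set it True and --noverbose to set it
+# False; BooleanParser above also accepts explicit values, e.g. --verbose=true.
+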
+
+# Match C++ API to unconfuse C++ people.
+DEFINE_bool = DEFINE_boolean
+
+
+class HelpFlag(BooleanFlag):
+  """
+  HelpFlag is a special boolean flag that prints usage information and
+  raises a SystemExit exception if it is ever found in the command
+  line arguments.  Note this is called with allow_override=1, so other
+  apps can define their own --help flag, replacing this one, if they want.
+  """
+  def __init__(self):
+    BooleanFlag.__init__(self, "help", 0, "show this help",
+                         short_name="?", allow_override=1)
+
+  def Parse(self, arg):
+    if arg:
+      doc = sys.modules["__main__"].__doc__
+      flags = str(FLAGS)
+      print doc or ("\nUSAGE: %s [flags]\n" % sys.argv[0])
+      if flags:
+        print "flags:"
+        print flags
+      sys.exit(1)
+
+
+class HelpXMLFlag(BooleanFlag):
+  """Similar to HelpFlag, but generates output in XML format."""
+  def __init__(self):
+    BooleanFlag.__init__(self, 'helpxml', False,
+                         'like --help, but generates XML output',
+                         allow_override=1)
+
+  def Parse(self, arg):
+    if arg:
+      FLAGS.WriteHelpInXMLFormat(sys.stdout)
+      sys.exit(1)
+
+
+class HelpshortFlag(BooleanFlag):
+  """
+  HelpshortFlag is a special boolean flag that prints usage
+  information for the "main" module, and raises a SystemExit exception
+  if it is ever found in the command line arguments.  Note this is
+  called with allow_override=1, so other apps can define their own
+  --helpshort flag, replacing this one, if they want.
+  """
+  def __init__(self):
+    BooleanFlag.__init__(self, "helpshort", 0,
+                         "show usage only for this module", allow_override=1)
+  def Parse(self, arg):
+    if arg:
+      doc = sys.modules["__main__"].__doc__
+      flags = FLAGS.MainModuleHelp()
+      print doc or ("\nUSAGE: %s [flags]\n" % sys.argv[0])
+      if flags:
+        print "flags:"
+        print flags
+      sys.exit(1)
+
+#
+# Numeric parser - base class for Integer and Float parsers
+#
+
+
+class NumericParser(ArgumentParser):
+  """Parser of numeric values.
+
+  Parsed value may be bounded to a given upper and lower bound.
+  """
+
+  def IsOutsideBounds(self, val):
+    return ((self.lower_bound is not None and val < self.lower_bound) or
+            (self.upper_bound is not None and val > self.upper_bound))
+
+  def Parse(self, argument):
+    val = self.Convert(argument)
+    if self.IsOutsideBounds(val):
+      raise ValueError("%s is not %s" % (val, self.syntactic_help))
+    return val
+
+  def WriteCustomInfoInXMLFormat(self, outfile, indent):
+    if self.lower_bound is not None:
+      _WriteSimpleXMLElement(outfile, 'lower_bound', self.lower_bound, indent)
+    if self.upper_bound is not None:
+      _WriteSimpleXMLElement(outfile, 'upper_bound', self.upper_bound, indent)
+
+  def Convert(self, argument):
+    """Default implementation: always returns its argument unmodified."""
+    return argument
+
+# End of Numeric Parser
+
+#
+# FLOAT FLAGS
+#
+
+
+class FloatParser(NumericParser):
+  """Parser of floating point values.
+
+  Parsed value may be bounded to a given upper and lower bound.
+  """
+  number_article = "a"
+  number_name = "number"
+  syntactic_help = " ".join((number_article, number_name))
+
+  def __init__(self, lower_bound=None, upper_bound=None):
+    super(FloatParser, self).__init__()
+    self.lower_bound = lower_bound
+    self.upper_bound = upper_bound
+    sh = self.syntactic_help
+    if lower_bound is not None and upper_bound is not None:
+      sh = ("%s in the range [%s, %s]" % (sh, lower_bound, upper_bound))
+    elif lower_bound == 0:
+      sh = "a non-negative %s" % self.number_name
+    elif upper_bound == 0:
+      sh = "a non-positive %s" % self.number_name
+    elif upper_bound is not None:
+      sh = "%s <= %s" % (self.number_name, upper_bound)
+    elif lower_bound is not None:
+      sh = "%s >= %s" % (self.number_name, lower_bound)
+    self.syntactic_help = sh
+
+  def Convert(self, argument):
+    """Converts argument to a float; raises ValueError on errors."""
+    return float(argument)
+
+  def Type(self):
+    return 'float'
+# End of FloatParser
+
+
+def DEFINE_float(name, default, help, lower_bound=None, upper_bound=None,
+                 flag_values=FLAGS, **args):
+  """Registers a flag whose value must be a float.
+
+  If lower_bound or upper_bound are set, then this flag must be
+  within the given range.
+  """
+  parser = FloatParser(lower_bound, upper_bound)
+  serializer = ArgumentSerializer()
+  DEFINE(parser, name, default, help, flag_values, serializer, **args)
+  _RegisterBoundsValidatorIfNeeded(parser, name, flag_values=flag_values)
+
+#
+# INTEGER FLAGS
+#
+
+
+class IntegerParser(NumericParser):
+  """Parser of an integer value.
+
+  Parsed value may be bounded to a given upper and lower bound.
+  """
+  number_article = "an"
+  number_name = "integer"
+  syntactic_help = " ".join((number_article, number_name))
+
+  def __init__(self, lower_bound=None, upper_bound=None):
+    super(IntegerParser, self).__init__()
+    self.lower_bound = lower_bound
+    self.upper_bound = upper_bound
+    sh = self.syntactic_help
+    if lower_bound is not None and upper_bound is not None:
+      sh = ("%s in the range [%s, %s]" % (sh, lower_bound, upper_bound))
+    elif lower_bound == 1:
+      sh = "a positive %s" % self.number_name
+    elif upper_bound == -1:
+      sh = "a negative %s" % self.number_name
+    elif lower_bound == 0:
+      sh = "a non-negative %s" % self.number_name
+    elif upper_bound == 0:
+      sh = "a non-positive %s" % self.number_name
+    elif upper_bound is not None:
+      sh = "%s <= %s" % (self.number_name, upper_bound)
+    elif lower_bound is not None:
+      sh = "%s >= %s" % (self.number_name, lower_bound)
+    self.syntactic_help = sh
+
+  def Convert(self, argument):
+    __pychecker__ = 'no-returnvalues'
+    if type(argument) == str:
+      base = 10
+      if len(argument) > 2 and argument[0] == "0" and argument[1] == "x":
+        base = 16
+      return int(argument, base)
+    else:
+      return int(argument)
+
+  def Type(self):
+    return 'int'
+
+
+def DEFINE_integer(name, default, help, lower_bound=None, upper_bound=None,
+                   flag_values=FLAGS, **args):
+  """Registers a flag whose value must be an integer.
+
+  If lower_bound or upper_bound are set, then this flag must be
+  within the given range.
+  """
+  parser = IntegerParser(lower_bound, upper_bound)
+  serializer = ArgumentSerializer()
+  DEFINE(parser, name, default, help, flag_values, serializer, **args)
+  _RegisterBoundsValidatorIfNeeded(parser, name, flag_values=flag_values)
+
+
+#
+# ENUM FLAGS
+#
+
+
+class EnumParser(ArgumentParser):
+  """Parser of a string enum value (a string value from a given set).
+
+  If enum_values (see below) is not specified, any string is allowed.
+  """
+
+  def __init__(self, enum_values=None):
+    super(EnumParser, self).__init__()
+    self.enum_values = enum_values
+
+  def Parse(self, argument):
+    if self.enum_values and argument not in self.enum_values:
+      raise ValueError("value should be one of <%s>" %
+                       "|".join(self.enum_values))
+    return argument
+
+  def Type(self):
+    return 'string enum'
+
+
+class EnumFlag(Flag):
+  """Basic enum flag; its value can be any string from list of enum_values."""
+
+  def __init__(self, name, default, help, enum_values=None,
+               short_name=None, **args):
+    enum_values = enum_values or []
+    p = EnumParser(enum_values)
+    g = ArgumentSerializer()
+    Flag.__init__(self, p, g, name, default, help, short_name, **args)
+    if not self.help: self.help = "an enum string"
+    self.help = "<%s>: %s" % ("|".join(enum_values), self.help)
+
+  def _WriteCustomInfoInXMLFormat(self, outfile, indent):
+    for enum_value in self.parser.enum_values:
+      _WriteSimpleXMLElement(outfile, 'enum_value', enum_value, indent)
+
+
+def DEFINE_enum(name, default, enum_values, help, flag_values=FLAGS,
+                **args):
+  """Registers a flag whose value can be any string from enum_values."""
+  DEFINE_flag(EnumFlag(name, default, help, enum_values, ** args),
+              flag_values)
+
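+# Illustrative sketch (hypothetical flag): restrict --color to a fixed set.
+#
+#   DEFINE_enum('color', 'red', ['red', 'green', 'blue'], 'Favorite color.')
+#
+# A value outside the set, e.g. --color=purple, is rejected with an error
+# listing <red|green|blue>.
+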
+
+#
+# LIST FLAGS
+#
+
+
+class BaseListParser(ArgumentParser):
+  """Base class for a parser of lists of strings.
+
+  To extend, inherit from this class; from the subclass __init__, call
+
+    BaseListParser.__init__(self, token, name)
+
+  where token is a character used to tokenize, and name is a description
+  of the separator.
+  """
+
+  def __init__(self, token=None, name=None):
+    assert name
+    super(BaseListParser, self).__init__()
+    self._token = token
+    self._name = name
+    self.syntactic_help = "a %s separated list" % self._name
+
+  def Parse(self, argument):
+    if isinstance(argument, list):
+      return argument
+    elif argument == '':
+      return []
+    else:
+      return [s.strip() for s in argument.split(self._token)]
+
+  def Type(self):
+    return '%s separated list of strings' % self._name
+
+
+class ListParser(BaseListParser):
+  """Parser for a comma-separated list of strings."""
+
+  def __init__(self):
+    BaseListParser.__init__(self, ',', 'comma')
+
+  def WriteCustomInfoInXMLFormat(self, outfile, indent):
+    BaseListParser.WriteCustomInfoInXMLFormat(self, outfile, indent)
+    _WriteSimpleXMLElement(outfile, 'list_separator', repr(','), indent)
+
+
+class WhitespaceSeparatedListParser(BaseListParser):
+  """Parser for a whitespace-separated list of strings."""
+
+  def __init__(self):
+    BaseListParser.__init__(self, None, 'whitespace')
+
+  def WriteCustomInfoInXMLFormat(self, outfile, indent):
+    BaseListParser.WriteCustomInfoInXMLFormat(self, outfile, indent)
+    # Emit the separators in sorted order so the output is deterministic.
+    separators = sorted(string.whitespace)
+    for ws_char in separators:
+      _WriteSimpleXMLElement(outfile, 'list_separator', repr(ws_char), indent)
+
+
+def DEFINE_list(name, default, help, flag_values=FLAGS, **args):
+  """Registers a flag whose value is a comma-separated list of strings."""
+  parser = ListParser()
+  serializer = ListSerializer(',')
+  DEFINE(parser, name, default, help, flag_values, serializer, **args)
+
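+# Illustrative sketch (hypothetical flag): a comma-separated value becomes
+# a Python list of stripped strings.
+#
+#   DEFINE_list('users', [], 'Users to notify.')
+#   # --users=alice,bob   =>   FLAGS.users == ['alice', 'bob']
+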
+
+def DEFINE_spaceseplist(name, default, help, flag_values=FLAGS, **args):
+  """Registers a flag whose value is a whitespace-separated list of strings.
+
+  Any whitespace can be used as a separator.
+  """
+  parser = WhitespaceSeparatedListParser()
+  serializer = ListSerializer(' ')
+  DEFINE(parser, name, default, help, flag_values, serializer, **args)
+
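+# Illustrative sketch: with DEFINE_spaceseplist('dirs', [], 'Dirs to scan.'),
+# --dirs='a  b c' parses to ['a', 'b', 'c'] (any whitespace run separates).
+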
+
+#
+# MULTI FLAGS
+#
+
+
+class MultiFlag(Flag):
+  """A flag that can appear multiple time on the command-line.
+
+  The value of such a flag is a list that contains the individual values
+  from all the appearances of that flag on the command-line.
+
+  See the __doc__ for Flag for most behavior of this class.  Only
+  differences in behavior are described here:
+
+    * The default value may be either a single value or a list of values.
+      A single value is interpreted as the [value] singleton list.
+
+    * The value of the flag is always a list, even if the option was
+      only supplied once, and even if the default value is a single
+      value.
+  """
+
+  def __init__(self, *args, **kwargs):
+    Flag.__init__(self, *args, **kwargs)
+    self.help += ';\n    repeat this option to specify a list of values'
+
+  def Parse(self, arguments):
+    """Parses one or more arguments with the installed parser.
+
+    Args:
+      arguments: a single argument or a list of arguments (typically a
+        list of default values); a single argument is converted
+        internally into a list containing one item.
+    """
+    if not isinstance(arguments, list):
+      # Default value may be a list of values.  Most other arguments
+      # will not be, so convert them into a single-item list to make
+      # processing simpler below.
+      arguments = [arguments]
+
+    if self.present:
+      # keep a backup reference to list of previously supplied option values
+      values = self.value
+    else:
+      # "erase" the defaults with an empty list
+      values = []
+
+    for item in arguments:
+      # have Flag superclass parse argument, overwriting self.value reference
+      Flag.Parse(self, item)  # also increments self.present
+      values.append(self.value)
+
+    # put list of option values back in the 'value' attribute
+    self.value = values
+
+  def Serialize(self):
+    if not self.serializer:
+      raise FlagsError("Serializer not present for flag %s" % self.name)
+    if self.value is None:
+      return ''
+
+    s = ''
+
+    multi_value = self.value
+
+    for self.value in multi_value:
+      if s: s += ' '
+      s += Flag.Serialize(self)
+
+    self.value = multi_value
+
+    return s
+
+  def Type(self):
+    return 'multi ' + self.parser.Type()
+
+
+def DEFINE_multi(parser, serializer, name, default, help, flag_values=FLAGS,
+                 **args):
+  """Registers a generic MultiFlag that parses its args with a given parser.
+
+  Auxiliary function.  Normal users should NOT use it directly.
+
+  Developers who need to create their own 'Parser' classes for options
+  which can appear multiple times can call this module function to
+  register their flags.
+  """
+  DEFINE_flag(MultiFlag(parser, serializer, name, default, help, **args),
+              flag_values)
+
+
+def DEFINE_multistring(name, default, help, flag_values=FLAGS, **args):
+  """Registers a flag whose value can be a list of any strings.
+
+  Use the flag on the command line multiple times to place multiple
+  string values into the list.  The 'default' may be a single string
+  (which will be converted into a single-element list) or a list of
+  strings.
+  """
+  parser = ArgumentParser()
+  serializer = ArgumentSerializer()
+  DEFINE_multi(parser, serializer, name, default, help, flag_values, **args)
+
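+# Illustrative sketch (hypothetical flag): each occurrence appends a value.
+#
+#   DEFINE_multistring('tag', [], 'Tag for the run; repeatable.')
+#   # myprog --tag=fast --tag=gpu   =>   FLAGS.tag == ['fast', 'gpu']
+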
+
+def DEFINE_multi_int(name, default, help, lower_bound=None, upper_bound=None,
+                     flag_values=FLAGS, **args):
+  """Registers a flag whose value can be a list of arbitrary integers.
+
+  Use the flag on the command line multiple times to place multiple
+  integer values into the list.  The 'default' may be a single integer
+  (which will be converted into a single-element list) or a list of
+  integers.
+  """
+  parser = IntegerParser(lower_bound, upper_bound)
+  serializer = ArgumentSerializer()
+  DEFINE_multi(parser, serializer, name, default, help, flag_values, **args)
+
+
+def DEFINE_multi_float(name, default, help, lower_bound=None, upper_bound=None,
+                       flag_values=FLAGS, **args):
+  """Registers a flag whose value can be a list of arbitrary floats.
+
+  Use the flag on the command line multiple times to place multiple
+  float values into the list.  The 'default' may be a single float
+  (which will be converted into a single-element list) or a list of
+  floats.
+  """
+  parser = FloatParser(lower_bound, upper_bound)
+  serializer = ArgumentSerializer()
+  DEFINE_multi(parser, serializer, name, default, help, flag_values, **args)
+
+
+# Now register the flags that we want to exist in all applications.
+# These are all defined with allow_override=1, so user-apps can use
+# these flagnames for their own purposes, if they want.
+DEFINE_flag(HelpFlag())
+DEFINE_flag(HelpshortFlag())
+DEFINE_flag(HelpXMLFlag())
+
+# Define special flags here so that help may be generated for them.
+# NOTE: Please do NOT use _SPECIAL_FLAGS from outside this module.
+_SPECIAL_FLAGS = FlagValues()
+
+
+DEFINE_string(
+    'flagfile', "",
+    "Insert flag definitions from the given file into the command line.",
+    _SPECIAL_FLAGS)
+
+DEFINE_string(
+    'undefok', "",
+    "comma-separated list of flag names that it is okay to specify "
+    "on the command line even if the program does not define a flag "
+    "with that name.  IMPORTANT: flags in this list that have "
+    "arguments MUST use the --flag=value format.", _SPECIAL_FLAGS)
diff --git a/gflags/gflags_validators.py b/gflags/gflags_validators.py
new file mode 100644
index 0000000..d83058d
--- /dev/null
+++ b/gflags/gflags_validators.py
@@ -0,0 +1,187 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2010, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Module to enforce different constraints on flags.
+
+A validator represents an invariant, enforced over one or more flags.
+See 'FLAGS VALIDATORS' in gflags.py's docstring for a usage manual.
+"""
+
+__author__ = 'olexiy@google.com (Olexiy Oryeshko)'
+
+
+class Error(Exception):
+  """Thrown If validator constraint is not satisfied."""
+
+
+class Validator(object):
+  """Base class for flags validators.
+
+  Users should NOT overload these classes; use the gflags.Register...
+  methods instead.
+  """
+
+  # Used to assign each validator a unique insertion_index
+  validators_count = 0
+
+  def __init__(self, checker, message):
+    """Constructor to create all validators.
+
+    Args:
+      checker: function to verify the constraint.
+        Input of this method varies, see SimpleValidator and
+          DictionaryValidator for a detailed description.
+      message: string, error message to be shown to the user
+    """
+    self.checker = checker
+    self.message = message
+    Validator.validators_count += 1
+    # Used to assert validators in the order they were registered (CL/18694236)
+    self.insertion_index = Validator.validators_count
+
+  def Verify(self, flag_values):
+    """Verify that constraint is satisfied.
+
+    flags library calls this method to verify the Validator's constraint.
+
+    Args:
+      flag_values: gflags.FlagValues, containing all flags
+    Raises:
+      Error: if constraint is not satisfied.
+    """
+    param = self._GetInputToCheckerFunction(flag_values)
+    if not self.checker(param):
+      raise Error(self.message)
+
+  def GetFlagsNames(self):
+    """Return the names of the flags checked by this validator.
+
+    Returns:
+      [string], names of the flags
+    """
+    raise NotImplementedError('This method should be overloaded')
+
+  def PrintFlagsWithValues(self, flag_values):
+    raise NotImplementedError('This method should be overloaded')
+
+  def _GetInputToCheckerFunction(self, flag_values):
+    """Given flag values, construct the input to be given to checker.
+
+    Args:
+      flag_values: gflags.FlagValues, containing all flags.
+    Returns:
+      Return type depends on the specific validator.
+    """
+    raise NotImplementedError('This method should be overloaded')
+
+
+class SimpleValidator(Validator):
+  """Validator behind RegisterValidator() method.
+
+  Validates that a single flag passes its checker function. The checker
+  function takes the flag value and returns True (if the value looks fine)
+  or, if the flag value is not valid, either returns False or raises an
+  Exception.
+  """
+
+  def __init__(self, flag_name, checker, message):
+    """Constructor.
+
+    Args:
+      flag_name: string, name of the flag.
+      checker: function to verify the validator.
+        input  - value of the corresponding flag (string, boolean, etc).
+        output - Boolean. Must return True if validator constraint is satisfied.
+          If constraint is not satisfied, it should either return False or
+          raise Error.
+      message: string, error message to be shown to the user if validator's
+        condition is not satisfied
+    """
+    super(SimpleValidator, self).__init__(checker, message)
+    self.flag_name = flag_name
+
+  def GetFlagsNames(self):
+    return [self.flag_name]
+
+  def PrintFlagsWithValues(self, flag_values):
+    return 'flag --%s=%s' % (self.flag_name, flag_values[self.flag_name].value)
+
+  def _GetInputToCheckerFunction(self, flag_values):
+    """Given flag values, construct the input to be given to checker.
+
+    Args:
+      flag_values: gflags.FlagValues
+    Returns:
+      value of the corresponding flag.
+    """
+    return flag_values[self.flag_name].value
+
+
+class DictionaryValidator(Validator):
+  """Validator behind RegisterDictionaryValidator method.
+
+  Validates that flag values pass their common checker function. The checker
+  function takes flag values and returns True (if values look fine) or,
+  if values are not valid, either returns False or raises an Exception.
+  """
+  def __init__(self, flag_names, checker, message):
+    """Constructor.
+
+    Args:
+      flag_names: [string], containing names of the flags used by checker.
+      checker: function to verify the validator.
+        input  - dictionary, with keys() being flag_names, and value for each
+          key being the value of the corresponding flag (string, boolean, etc).
+        output - Boolean. Must return True if validator constraint is satisfied.
+          If constraint is not satisfied, it should either return False or
+          raise Error.
+      message: string, error message to be shown to the user if validator's
+        condition is not satisfied
+    """
+    super(DictionaryValidator, self).__init__(checker, message)
+    self.flag_names = flag_names
+
+  def _GetInputToCheckerFunction(self, flag_values):
+    """Given flag values, construct the input to be given to checker.
+
+    Args:
+      flag_values: gflags.FlagValues
+    Returns:
+      dictionary, with keys() being self.flag_names, and value for each key
+        being the value of the corresponding flag (string, boolean, etc).
+    """
+    return dict([key, flag_values[key].value] for key in self.flag_names)
+
+  def PrintFlagsWithValues(self, flag_values):
+    prefix = 'flags '
+    flags_with_values = []
+    for key in self.flag_names:
+      flags_with_values.append('%s=%s' % (key, flag_values[key].value))
+    return prefix + ', '.join(flags_with_values)
+
+  def GetFlagsNames(self):
+    return self.flag_names
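+
+
+# Illustrative sketch (hypothetical flags 'start' and 'end'): a
+# DictionaryValidator whose checker receives a dict keyed by flag name.
+#
+#   v = DictionaryValidator(['start', 'end'],
+#                           lambda flags: flags['start'] <= flags['end'],
+#                           'Flag --start must not exceed --end.')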
diff --git a/google/appengine/api/app_identity/app_identity.py b/google/appengine/api/app_identity/app_identity.py
index 535f64c..448aaa2 100644
--- a/google/appengine/api/app_identity/app_identity.py
+++ b/google/appengine/api/app_identity/app_identity.py
@@ -55,6 +55,8 @@
            'get_access_token',
            'get_access_token_uncached',
            'make_get_access_token_call',
+           'get_default_gcs_bucket_name',
+           'make_get_default_gcs_bucket_name_call',
           ]
 
 
@@ -62,6 +64,7 @@
 _SIGN_FOR_APP_METHOD_NAME = 'SignForApp'
 _GET_CERTS_METHOD_NAME = 'GetPublicCertificatesForApp'
 _GET_SERVICE_ACCOUNT_NAME_METHOD_NAME = 'GetServiceAccountName'
+_GET_DEFAULT_GCS_BUCKET_NAME_METHOD_NAME = 'GetDefaultGcsBucketName'
 _GET_ACCESS_TOKEN_METHOD_NAME = 'GetAccessToken'
 _PARTITION_SEPARATOR = '~'
 _DOMAIN_SEPARATOR = ':'
@@ -285,6 +288,51 @@
                 response, get_service_account_name_result)
 
 
+def make_get_default_gcs_bucket_name_call(rpc):
+  """Get default google storage bucket name for the app.
+
+  Args:
+    rpc: A UserRPC object.
+
+  Returns:
+    Default Google Storage Bucket name of the app.
+  """
+  request = app_identity_service_pb.GetDefaultGcsBucketNameRequest()
+  response = app_identity_service_pb.GetDefaultGcsBucketNameResponse()
+
+  if rpc.deadline is not None:
+    request.set_deadline(rpc.deadline)
+
+  def get_default_gcs_bucket_name_result(rpc):
+    """Check success, handle exceptions, and return converted RPC result.
+
+    This method waits for the RPC if it has not yet finished, and calls the
+    post-call hooks on the first invocation.
+
+    Args:
+      rpc: A UserRPC object.
+
+    Returns:
+      A string which is the name of the app's default google storage bucket.
+    """
+    assert rpc.service == _APP_IDENTITY_SERVICE_NAME, repr(rpc.service)
+    assert rpc.method == _GET_DEFAULT_GCS_BUCKET_NAME_METHOD_NAME, (
+        repr(rpc.method))
+    try:
+      rpc.check_success()
+    except apiproxy_errors.ApplicationError, err:
+      raise _to_app_identity_error(err)
+
+    if response.has_default_gcs_bucket_name():
+      return response.default_gcs_bucket_name()
+    else:
+      return None
+
+
+  rpc.make_call(_GET_DEFAULT_GCS_BUCKET_NAME_METHOD_NAME, request,
+                response, get_default_gcs_bucket_name_result)
+
+
 def sign_blob(bytes_to_sign, deadline=None):
   """Signs a blob.
 
@@ -334,6 +382,22 @@
   return rpc.get_result()
 
 
+def get_default_gcs_bucket_name(deadline=None):
+  """Gets the default gs bucket name for the app.
+
+  Args:
+    deadline: Optional deadline in seconds for the operation; the default
+      is a system-specific deadline (typically 5 seconds).
+
+  Returns:
+    Default bucket name for the app.
+  """
+  rpc = create_rpc(deadline)
+  make_get_default_gcs_bucket_name_call(rpc)
+  rpc.wait()
+  return rpc.get_result()
+
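+# Illustrative sketch of calling this API from application code:
+#
+#   from google.appengine.api import app_identity
+#   bucket_name = app_identity.get_default_gcs_bucket_name()
+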
+
 def _ParseFullAppId(app_id):
   """Parse a full app id into partition, domain name and display app_id.
 
diff --git a/google/appengine/api/app_identity/app_identity_service_pb.py b/google/appengine/api/app_identity/app_identity_service_pb.py
index f612420..75337bf 100644
--- a/google/appengine/api/app_identity/app_identity_service_pb.py
+++ b/google/appengine/api/app_identity/app_identity_service_pb.py
@@ -171,7 +171,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.AppIdentityServiceError'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KImFwcGhvc3RpbmcuQXBwSWRlbnRpdHlTZXJ2aWNlRXJyb3JzeglFcnJvckNvZGWLAZIBB1NVQ0NFU1OYAQCMAYsBkgENVU5LTk9XTl9TQ09QRZgBCYwBiwGSAQ5CTE9CX1RPT19MQVJHRZgB6AeMAYsBkgERREVBRExJTkVfRVhDRUVERUSYAekHjAGLAZIBD05PVF9BX1ZBTElEX0FQUJgB6geMAYsBkgENVU5LTk9XTl9FUlJPUpgB6weMAYsBkgEZR0FJQU1JTlRfTk9UX0lOSVRJQUlMSVpFRJgB7AeMAYsBkgELTk9UX0FMTE9XRUSYAe0HjAGLAZIBD05PVF9JTVBMRU1FTlRFRJgB7geMAXS6AY0LCjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8SCmFwcGhvc3Rpbmci5gEKF0FwcElkZW50aXR5U2VydmljZUVycm9yIsoBCglFcnJvckNvZGUSCwoHU1VDQ0VTUxAAEhEKDVVOS05PV05fU0NPUEUQCRITCg5CTE9CX1RPT19MQVJHRRDoBxIWChFERUFETElORV9FWENFRURFRBDpBxIUCg9OT1RfQV9WQUxJRF9BUFAQ6gcSEgoNVU5LTk9XTl9FUlJPUhDrBxIeChlHQUlBTUlOVF9OT1RfSU5JVElBSUxJWkVEEOwHEhAKC05PVF9BTExPV0VEEO0HEhQKD05PVF9JTVBMRU1FTlRFRBDuByIqChFTaWduRm9yQXBwUmVxdWVzdBIVCg1ieXRlc190b19zaWduGAEgASgMIj8KElNpZ25Gb3JBcHBSZXNwb25zZRIQCghrZXlfbmFtZRgBIAEoCRIXCg9zaWduYXR1cmVfYnl0ZXMYAiABKAwiIwohR2V0UHVibGljQ2VydGlmaWNhdGVGb3JBcHBSZXF1ZXN0IkMKEVB1YmxpY0NlcnRpZmljYXRlEhAKCGtleV9uYW1lGAEgASgJEhwKFHg1MDlfY2VydGlmaWNhdGVfcGVtGAIgASgJIo0BCiJHZXRQdWJsaWNDZXJ0aWZpY2F0ZUZvckFwcFJlc3BvbnNlEj4KF3B1YmxpY19jZXJ0aWZpY2F0ZV9saXN0GAEgAygLMh0uYXBwaG9zdGluZy5QdWJsaWNDZXJ0aWZpY2F0ZRInCh9tYXhfY2xpZW50X2NhY2hlX3RpbWVfaW5fc2Vjb25kGAIgASgDIh4KHEdldFNlcnZpY2VBY2NvdW50TmFtZVJlcXVlc3QiPQodR2V0U2VydmljZUFjY291bnROYW1lUmVzcG9uc2USHAoUc2VydmljZV9hY2NvdW50X25hbWUYASABKAkiYAoVR2V0QWNjZXNzVG9rZW5SZXF1ZXN0Eg0KBXNjb3BlGAEgAygJEhoKEnNlcnZpY2VfYWNjb3VudF9pZBgCIAEoAxIcChRzZXJ2aWNlX2FjY291bnRfbmFtZRgDIAEoCSJHChZHZXRBY2Nlc3NUb2tlblJlc3BvbnNlEhQKDGFjY2Vzc190b2tlbhgBIAEoCRIXCg9leHBpcmF0aW9uX3RpbWUYAiABKAMyqgMKDlNpZ25pbmdTZXJ2aWNlEk0KClNpZ25Gb3JBcHASHS5hcHBob3N0aW5nLlNpZ25Gb3JBcHBSZXF1ZXN0Gh4uYXBwaG9zdGluZy5TaWduRm9yQXBwUmVzcG9uc2UiABJ+ChtHZXRQdWJsaWNDZXJ0aWZpY2F0ZXNGb3JBcHASLS5hcHBob3N0aW5nLkdldFB1YmxpY0NlcnRpZmljYXRlRm9yQXBwUmVxdWVzdBouLmFwcGhvc3RpbmcuR2V0UHVibGljQ2VydGlmaWNhdGVGb3JBcHBSZXNwb25zZSIAEm4KFUdldFNlcnZpY2VBY2NvdW50TmFtZRIoLmFwcGhvc3RpbmcuR2V0U2VydmljZUFjY291bnROYW1lUmVxdWVzdBopLmFwcGhvc3RpbmcuR2V0U2VydmljZUFjY291bnROYW1lUmVzcG9uc2UiABJZCg5HZXRBY2Nlc3NUb2tlbhIhLmFwcGhvc3RpbmcuR2V0QWNjZXNzVG9rZW5SZXF1ZXN0GiIuYXBwaG9zdGluZy5HZXRBY2Nlc3NUb2tlblJlc3BvbnNlIgBCQAokY29tLmdvb2dsZS5hcHBlbmdpbmUuYXBpLmFwcGlkZW50aXR5IAEoAkIUQXBwSWRlbnRpdHlTZXJ2aWNlUGI="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KImFwcGhvc3RpbmcuQXBwSWRlbnRpdHlTZXJ2aWNlRXJyb3JzeglFcnJvckNvZGWLAZIBB1NVQ0NFU1OYAQCMAYsBkgENVU5LTk9XTl9TQ09QRZgBCYwBiwGSAQ5CTE9CX1RPT19MQVJHRZgB6AeMAYsBkgERREVBRExJTkVfRVhDRUVERUSYAekHjAGLAZIBD05PVF9BX1ZBTElEX0FQUJgB6geMAYsBkgENVU5LTk9XTl9FUlJPUpgB6weMAYsBkgEZR0FJQU1JTlRfTk9UX0lOSVRJQUlMSVpFRJgB7AeMAYsBkgELTk9UX0FMTE9XRUSYAe0HjAGLAZIBD05PVF9JTVBMRU1FTlRFRJgB7geMAXS6AekMCjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8SCmFwcGhvc3Rpbmci5gEKF0FwcElkZW50aXR5U2VydmljZUVycm9yIsoBCglFcnJvckNvZGUSCwoHU1VDQ0VTUxAAEhEKDVVOS05PV05fU0NPUEUQCRITCg5CTE9CX1RPT19MQVJHRRDoBxIWChFERUFETElORV9FWENFRURFRBDpBxIUCg9OT1RfQV9WQUxJRF9BUFAQ6gcSEgoNVU5LTk9XTl9FUlJPUhDrBxIeChlHQUlBTUlOVF9OT1RfSU5JVElBSUxJWkVEEOwHEhAKC05PVF9BTExPV0VEEO0HEhQKD05PVF9JTVBMRU1FTlRFRBDuByIqChFTaWduRm9yQXBwUmVxdWVzdBIVCg1ieXRlc190b19zaWduGAEgASgMIj8KElNpZ25Gb3JBcHBSZXNwb25zZRIQCghrZXlfbmFtZRgBIAEoCRIXCg9zaWduYXR1cmVfYnl0ZXMYAiABKAwiIwohR2V0UHVibGljQ2VydGlmaWNhdGVGb3JBcHBSZXF1ZXN0IkMKEVB1YmxpY0NlcnRpZmljYXRlEhAKCGtleV9uYW1lGAEgASgJEhwKFHg1MDlfY2VydGlmaWNhdGVfcGVtGAIgASgJIo0BCiJHZXRQdWJsaWNDZXJ0aWZpY2F0ZUZvckFwcFJlc3BvbnNlEj4KF3B1YmxpY19jZXJ0aWZpY2F0ZV9saXN0GAEgAygLMh0uYXBwaG9zdGluZy5QdWJsaWNDZXJ0aWZpY2F0ZRInCh9tYXhfY2xpZW50X2NhY2hlX3RpbWVfaW5fc2Vjb25kGAIgASgDIh4KHEdldFNlcnZpY2VBY2NvdW50TmFtZVJlcXVlc3QiPQodR2V0U2VydmljZUFjY291bnROYW1lUmVzcG9uc2USHAoUc2VydmljZV9hY2NvdW50X25hbWUYASABKAkiYAoVR2V0QWNjZXNzVG9rZW5SZXF1ZXN0Eg0KBXNjb3BlGAEgAygJEhoKEnNlcnZpY2VfYWNjb3VudF9pZBgCIAEoAxIcChRzZXJ2aWNlX2FjY291bnRfbmFtZRgDIAEoCSJHChZHZXRBY2Nlc3NUb2tlblJlc3BvbnNlEhQKDGFjY2Vzc190b2tlbhgBIAEoCRIXCg9leHBpcmF0aW9uX3RpbWUYAiABKAMiIAoeR2V0RGVmYXVsdEdjc0J1Y2tldE5hbWVSZXF1ZXN0IkIKH0dldERlZmF1bHRHY3NCdWNrZXROYW1lUmVzcG9uc2USHwoXZGVmYXVsdF9nY3NfYnVja2V0X25hbWUYASABKAkyoAQKDlNpZ25pbmdTZXJ2aWNlEk0KClNpZ25Gb3JBcHASHS5hcHBob3N0aW5nLlNpZ25Gb3JBcHBSZXF1ZXN0Gh4uYXBwaG9zdGluZy5TaWduRm9yQXBwUmVzcG9uc2UiABJ+ChtHZXRQdWJsaWNDZXJ0aWZpY2F0ZXNGb3JBcHASLS5hcHBob3N0aW5nLkdldFB1YmxpY0NlcnRpZmljYXRlRm9yQXBwUmVxdWVzdBouLmFwcGhvc3RpbmcuR2V0UHVibGljQ2VydGlmaWNhdGVGb3JBcHBSZXNwb25zZSIAEm4KFUdldFNlcnZpY2VBY2NvdW50TmFtZRIoLmFwcGhvc3RpbmcuR2V0U2VydmljZUFjY291bnROYW1lUmVxdWVzdBopLmFwcGhvc3RpbmcuR2V0U2VydmljZUFjY291bnROYW1lUmVzcG9uc2UiABJZCg5HZXRBY2Nlc3NUb2tlbhIhLmFwcGhvc3RpbmcuR2V0QWNjZXNzVG9rZW5SZXF1ZXN0GiIuYXBwaG9zdGluZy5HZXRBY2Nlc3NUb2tlblJlc3BvbnNlIgASdAoXR2V0RGVmYXVsdEdjc0J1Y2tldE5hbWUSKi5hcHBob3N0aW5nLkdldERlZmF1bHRHY3NCdWNrZXROYW1lUmVxdWVzdBorLmFwcGhvc3RpbmcuR2V0RGVmYXVsdEdjc0J1Y2tldE5hbWVSZXNwb25zZSIAQkAKJGNvbS5nb29nbGUuYXBwZW5naW5lLmFwaS5hcHBpZGVudGl0eSABKAJCFEFwcElkZW50aXR5U2VydmljZVBi"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -1500,6 +1500,232 @@
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
 
+class GetDefaultGcsBucketNameRequest(ProtocolBuffer.ProtocolMessage):
+
+  def __init__(self, contents=None):
+    pass
+    if contents is not None: self.MergeFromString(contents)
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+
+  if _net_proto___parse__python is not None:
+    def _CMergeFromString(self, s):
+      _net_proto___parse__python.MergeFromString(self, 'apphosting.GetDefaultGcsBucketNameRequest', s)
+
+  if _net_proto___parse__python is not None:
+    def _CEncode(self):
+      return _net_proto___parse__python.Encode(self, 'apphosting.GetDefaultGcsBucketNameRequest')
+
+  if _net_proto___parse__python is not None:
+    def _CEncodePartial(self):
+      return _net_proto___parse__python.EncodePartial(self, 'apphosting.GetDefaultGcsBucketNameRequest')
+
+  if _net_proto___parse__python is not None:
+    def _CToASCII(self, output_format):
+      return _net_proto___parse__python.ToASCII(self, 'apphosting.GetDefaultGcsBucketNameRequest', output_format)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCII(self, s):
+      _net_proto___parse__python.ParseASCII(self, 'apphosting.GetDefaultGcsBucketNameRequest', s)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCIIIgnoreUnknown(self, s):
+      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.GetDefaultGcsBucketNameRequest', s)
+
+
+  def Equals(self, x):
+    if x is self: return 1
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    return n
+
+  def Clear(self):
+    pass
+
+  def OutputUnchecked(self, out):
+    pass
+
+  def OutputPartial(self, out):
+    pass
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.GetDefaultGcsBucketNameRequest'
+  _SERIALIZED_DESCRIPTOR = array.array('B')
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KKWFwcGhvc3RpbmcuR2V0RGVmYXVsdEdjc0J1Y2tldE5hbWVSZXF1ZXN0wgEiYXBwaG9zdGluZy5BcHBJZGVudGl0eVNlcnZpY2VFcnJvcg=="))
+  if _net_proto___parse__python is not None:
+    _net_proto___parse__python.RegisterType(
+        _SERIALIZED_DESCRIPTOR.tostring())
+
+class GetDefaultGcsBucketNameResponse(ProtocolBuffer.ProtocolMessage):
+  has_default_gcs_bucket_name_ = 0
+  default_gcs_bucket_name_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def default_gcs_bucket_name(self): return self.default_gcs_bucket_name_
+
+  def set_default_gcs_bucket_name(self, x):
+    self.has_default_gcs_bucket_name_ = 1
+    self.default_gcs_bucket_name_ = x
+
+  def clear_default_gcs_bucket_name(self):
+    if self.has_default_gcs_bucket_name_:
+      self.has_default_gcs_bucket_name_ = 0
+      self.default_gcs_bucket_name_ = ""
+
+  def has_default_gcs_bucket_name(self): return self.has_default_gcs_bucket_name_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_default_gcs_bucket_name()): self.set_default_gcs_bucket_name(x.default_gcs_bucket_name())
+
+  if _net_proto___parse__python is not None:
+    def _CMergeFromString(self, s):
+      _net_proto___parse__python.MergeFromString(self, 'apphosting.GetDefaultGcsBucketNameResponse', s)
+
+  if _net_proto___parse__python is not None:
+    def _CEncode(self):
+      return _net_proto___parse__python.Encode(self, 'apphosting.GetDefaultGcsBucketNameResponse')
+
+  if _net_proto___parse__python is not None:
+    def _CEncodePartial(self):
+      return _net_proto___parse__python.EncodePartial(self, 'apphosting.GetDefaultGcsBucketNameResponse')
+
+  if _net_proto___parse__python is not None:
+    def _CToASCII(self, output_format):
+      return _net_proto___parse__python.ToASCII(self, 'apphosting.GetDefaultGcsBucketNameResponse', output_format)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCII(self, s):
+      _net_proto___parse__python.ParseASCII(self, 'apphosting.GetDefaultGcsBucketNameResponse', s)
+
+
+  if _net_proto___parse__python is not None:
+    def ParseASCIIIgnoreUnknown(self, s):
+      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.GetDefaultGcsBucketNameResponse', s)
+
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_default_gcs_bucket_name_ != x.has_default_gcs_bucket_name_: return 0
+    if self.has_default_gcs_bucket_name_ and self.default_gcs_bucket_name_ != x.default_gcs_bucket_name_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_default_gcs_bucket_name_): n += 1 + self.lengthString(len(self.default_gcs_bucket_name_))
+    return n
+
+  def ByteSizePartial(self):
+    n = 0
+    if (self.has_default_gcs_bucket_name_): n += 1 + self.lengthString(len(self.default_gcs_bucket_name_))
+    return n
+
+  def Clear(self):
+    self.clear_default_gcs_bucket_name()
+
+  def OutputUnchecked(self, out):
+    if (self.has_default_gcs_bucket_name_):
+      out.putVarInt32(10)
+      out.putPrefixedString(self.default_gcs_bucket_name_)
+
+  def OutputPartial(self, out):
+    if (self.has_default_gcs_bucket_name_):
+      out.putVarInt32(10)
+      out.putPrefixedString(self.default_gcs_bucket_name_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_default_gcs_bucket_name(d.getPrefixedString())
+        continue
+
+
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_default_gcs_bucket_name_: res+=prefix+("default_gcs_bucket_name: %s\n" % self.DebugFormatString(self.default_gcs_bucket_name_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kdefault_gcs_bucket_name = 1
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "default_gcs_bucket_name",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+  _PROTO_DESCRIPTOR_NAME = 'apphosting.GetDefaultGcsBucketNameResponse'
+  _SERIALIZED_DESCRIPTOR = array.array('B')
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KKmFwcGhvc3RpbmcuR2V0RGVmYXVsdEdjc0J1Y2tldE5hbWVSZXNwb25zZRMaF2RlZmF1bHRfZ2NzX2J1Y2tldF9uYW1lIAEoAjAJOAEUwgEiYXBwaG9zdGluZy5BcHBJZGVudGl0eVNlcnZpY2VFcnJvcg=="))
+  if _net_proto___parse__python is not None:
+    _net_proto___parse__python.RegisterType(
+        _SERIALIZED_DESCRIPTOR.tostring())
+
 
 
 class _SigningService_ClientBaseStub(_client_stub_base_class):
@@ -1510,6 +1736,7 @@
       '_protorpc_GetPublicCertificatesForApp', '_full_name_GetPublicCertificatesForApp',
       '_protorpc_GetServiceAccountName', '_full_name_GetServiceAccountName',
       '_protorpc_GetAccessToken', '_full_name_GetAccessToken',
+      '_protorpc_GetDefaultGcsBucketName', '_full_name_GetDefaultGcsBucketName',
   )
 
   def __init__(self, rpc_stub):
@@ -1531,6 +1758,10 @@
     self._full_name_GetAccessToken = self._stub.GetFullMethodName(
         'GetAccessToken')
 
+    self._protorpc_GetDefaultGcsBucketName = pywraprpc.RPC()
+    self._full_name_GetDefaultGcsBucketName = self._stub.GetFullMethodName(
+        'GetDefaultGcsBucketName')
+
   def SignForApp(self, request, rpc=None, callback=None, response=None):
     """Make a SignForApp RPC call.
 
@@ -1631,6 +1862,31 @@
                           callback,
                           self._protorpc_GetAccessToken)
 
+  def GetDefaultGcsBucketName(self, request, rpc=None, callback=None, response=None):
+    """Make a GetDefaultGcsBucketName RPC call.
+
+    Args:
+      request: a GetDefaultGcsBucketNameRequest instance.
+      rpc: Optional RPC instance to use for the call.
+      callback: Optional final callback. Will be called as
+          callback(rpc, result) when the rpc completes. If None, the
+          call is synchronous.
+      response: Optional ProtocolMessage to be filled in with response.
+
+    Returns:
+      The GetDefaultGcsBucketNameResponse if callback is None. Otherwise, returns None.
+    """
+
+    if response is None:
+      response = GetDefaultGcsBucketNameResponse
+    return self._MakeCall(rpc,
+                          self._full_name_GetDefaultGcsBucketName,
+                          'GetDefaultGcsBucketName',
+                          request,
+                          response,
+                          callback,
+                          self._protorpc_GetDefaultGcsBucketName)
+
 
 class _SigningService_ClientStub(_SigningService_ClientBaseStub):
   __slots__ = ('_params',)
@@ -1666,6 +1922,7 @@
       'GetPublicCertificatesForApp': (GetPublicCertificateForAppRequest, GetPublicCertificateForAppResponse),
       'GetServiceAccountName': (GetServiceAccountNameRequest, GetServiceAccountNameResponse),
       'GetAccessToken': (GetAccessTokenRequest, GetAccessTokenResponse),
+      'GetDefaultGcsBucketName': (GetDefaultGcsBucketNameRequest, GetDefaultGcsBucketNameResponse),
       }
 
   def __init__(self, *args, **kwargs):
@@ -1749,6 +2006,17 @@
     """
     raise NotImplementedError
 
+
+  def GetDefaultGcsBucketName(self, rpc, request, response):
+    """Handles a GetDefaultGcsBucketName RPC call. You should override this.
+
+    Args:
+      rpc: a Stubby RPC object
+      request: a GetDefaultGcsBucketNameRequest that contains the client request
+      response: a GetDefaultGcsBucketNameResponse that should be modified to send the response
+    """
+    raise NotImplementedError
+
   def _AddMethodAttributes(self):
     """Sets attributes on Python RPC handlers.
 
@@ -1778,8 +2046,14 @@
         GetAccessTokenResponse,
         None,
         'none')
+    rpcserver._GetHandlerDecorator(
+        self.GetDefaultGcsBucketName.im_func,
+        GetDefaultGcsBucketNameRequest,
+        GetDefaultGcsBucketNameResponse,
+        None,
+        'none')
 
 if _extension_runtime:
   pass
 
-__all__ = ['AppIdentityServiceError','SignForAppRequest','SignForAppResponse','GetPublicCertificateForAppRequest','PublicCertificate','GetPublicCertificateForAppResponse','GetServiceAccountNameRequest','GetServiceAccountNameResponse','GetAccessTokenRequest','GetAccessTokenResponse','SigningService']
+__all__ = ['AppIdentityServiceError','SignForAppRequest','SignForAppResponse','GetPublicCertificateForAppRequest','PublicCertificate','GetPublicCertificateForAppResponse','GetServiceAccountNameRequest','GetServiceAccountNameResponse','GetAccessTokenRequest','GetAccessTokenResponse','GetDefaultGcsBucketNameRequest','GetDefaultGcsBucketNameResponse','SigningService']
diff --git a/google/appengine/api/app_identity/app_identity_stub.py b/google/appengine/api/app_identity/app_identity_stub.py
index 17f67e3..2ce6d81 100644
--- a/google/appengine/api/app_identity/app_identity_stub.py
+++ b/google/appengine/api/app_identity/app_identity_stub.py
@@ -48,6 +48,7 @@
 from google.appengine.api import apiproxy_stub
 
 APP_SERVICE_ACCOUNT_NAME = 'test@localhost'
+APP_DEFAULT_GCS_BUCKET_NAME = 'app_default_bucket'
 
 SIGNING_KEY_NAME = 'key'
 
@@ -102,6 +103,7 @@
   def __init__(self, service_name='app_identity_service'):
     """Constructor."""
     super(AppIdentityServiceStub, self).__init__(service_name)
+    self.__default_gcs_bucket_name = APP_DEFAULT_GCS_BUCKET_NAME
 
   def _Dynamic_SignForApp(self, request, response):
     """Implementation of AppIdentityService::SignForApp."""
@@ -128,6 +130,16 @@
     """Implementation of AppIdentityService::GetServiceAccountName"""
     response.set_service_account_name(APP_SERVICE_ACCOUNT_NAME)
 
+  def _Dynamic_GetDefaultGcsBucketName(self, unused_request, response):
+    """Implementation of AppIdentityService::GetDefaultGcsBucketName."""
+    response.set_default_gcs_bucket_name(self.__default_gcs_bucket_name)
+
+  def SetDefaultGcsBucketName(self, default_gcs_bucket_name):
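+    """Sets the bucket name returned by GetDefaultGcsBucketName.
+
+    Args:
+      default_gcs_bucket_name: The bucket name to report, or a falsy value to
+          reset the stub to APP_DEFAULT_GCS_BUCKET_NAME.
+    """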
+    if default_gcs_bucket_name:
+      self.__default_gcs_bucket_name = default_gcs_bucket_name
+    else:
+      self.__default_gcs_bucket_name = APP_DEFAULT_GCS_BUCKET_NAME
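+
+  # Hypothetical test usage (assumes the stub is registered under its
+  # default service name):
+  #   stub = apiproxy_stub_map.apiproxy.GetStub('app_identity_service')
+  #   stub.SetDefaultGcsBucketName('my-test-bucket')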
+
   def _Dynamic_GetAccessToken(self, request, response):
     """Implementation of AppIdentityService::GetAccessToken.
 
diff --git a/google/appengine/api/appinfo_includes.py b/google/appengine/api/appinfo_includes.py
index 433bc88..5aea927 100644
--- a/google/appengine/api/appinfo_includes.py
+++ b/google/appengine/api/appinfo_includes.py
@@ -106,12 +106,16 @@
       appyaml.builtins.append(appinfo.BuiltinHandler(default='on'))
 
 
+
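+  # When app.yaml declares runtime: vm, the effective language runtime lives
+  # in vm_settings['vm_runtime']; resolve builtin includes against that
+  # runtime rather than the literal 'vm'.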
+  runtime_for_including = appyaml.runtime
+  if runtime_for_including == 'vm':
+    runtime_for_including = appyaml.vm_settings['vm_runtime']
   aggregate_appinclude, include_paths = (
       _ResolveIncludes(appinfo_path,
                        appinfo.AppInclude(builtins=appyaml.builtins,
                                           includes=appyaml.includes),
                        os.path.dirname(appinfo_path),
-                       appyaml.runtime,
+                       runtime_for_including,
                        open_fn=open_fn))
 
   return (
diff --git a/google/appengine/api/channel/channel_service_stub.py b/google/appengine/api/channel/channel_service_stub.py
index f189b97..533064b 100644
--- a/google/appengine/api/channel/channel_service_stub.py
+++ b/google/appengine/api/channel/channel_service_stub.py
@@ -27,6 +27,7 @@
 
 
 
+import hashlib
 import logging
 import random
 import time
@@ -36,6 +37,11 @@
 from google.appengine.runtime import apiproxy_errors
 
 
+def _GenerateTokenHash(token):
+  """Returns a MD5 hash of a token for integrity checking."""
+  return hashlib.md5(token).hexdigest()
+
+
 class Error(Exception):
   pass
 
@@ -117,10 +123,14 @@
 
     expiration_sec = long(self._time_func() + duration * 60) + 1
 
-    token = '-'.join([ChannelServiceStub.CHANNEL_TOKEN_IDENTIFIER,
-                      str(random.randint(0, 2 ** 32)),
-                      str(expiration_sec),
-                      client_id])
+    raw_token = '-'.join([ChannelServiceStub.CHANNEL_TOKEN_IDENTIFIER,
+                          str(random.randint(0, 2 ** 32)),
+                          str(expiration_sec),
+                          client_id])
+
+
+
+    token = '-'.join([_GenerateTokenHash(raw_token), raw_token])
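+    # Token layout: '<md5(raw_token)>-<identifier>-<random>-<expiration>-<client_id>'.
+    # The leading hash lets validation detect malformed tokens even when the
+    # client_id itself contains dashes.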
 
     self._log('Creating channel token %s with client id %s and duration %s',
               token, request.application_key(), duration)
@@ -170,34 +180,43 @@
        or None if this token is incorrectly formed and doesn't map to a
        client id.
     """
-    pieces = token.split('-', 3)
-    if len(pieces) == 4:
-      return pieces[3]
-    else:
+    try:
+      return self.validate_token_and_extract_client_id(token)
+    except (InvalidTokenError, TokenTimedOutError):
       return None
 
-
-  def check_token_validity(self, token):
-    """Checks if a token is well-formed and its expiration status.
+  def validate_token_and_extract_client_id(self, token):
+    """Ensures token is well-formed and hasn't expired, and extracts client_id.
 
     Args:
       token: a token returned by CreateChannel.
 
     Returns:
-      A tuple (syntax_valid, time_valid) where syntax_valid is true if the
-      token is well-formed and time_valid is true if the token is not expired.
-      In other words, a usable token will return (true, true).
-    """
-    pieces = token.split('-', 3)
-    if len(pieces) != 4:
-      return False, False
+      The client_id originally passed to CreateChannel.
 
-    (constant_identifier, token_id, expiration_sec, clientid) = pieces
-    syntax_valid = (
-        constant_identifier == ChannelServiceStub.CHANNEL_TOKEN_IDENTIFIER
-        and expiration_sec.isdigit())
-    time_valid = syntax_valid and long(expiration_sec) > self._time_func()
-    return (syntax_valid, time_valid)
+    Raises:
+      InvalidTokenError: The token is syntactically invalid.
+      TokenTimedOutError: The token expired or does not exist.
+    """
+
+    pieces = token.split('-', 1)
+    if len(pieces) != 2 or _GenerateTokenHash(pieces[1]) != pieces[0]:
+      raise InvalidTokenError()
+    raw_token = pieces[1]
+
+
+    pieces = raw_token.split('-', 3)
+    if len(pieces) != 4:
+      raise InvalidTokenError()
+
+    constant_id, unused_random_id, expiration_sec, client_id = pieces
+    if (constant_id != ChannelServiceStub.CHANNEL_TOKEN_IDENTIFIER
+        or not expiration_sec.isdigit()):
+      raise InvalidTokenError()
+    if long(expiration_sec) <= self._time_func():
+      raise TokenTimedOutError()
+
+    return client_id
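+
+  # Validation mirrors token creation above: split off and verify the leading
+  # hash, then split the raw token into exactly four pieces so a client_id
+  # containing dashes survives intact.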
 
   @apiproxy_stub.Synchronized
   def get_channel_messages(self, token):
@@ -327,13 +346,7 @@
       InvalidTokenError: The token is syntactically invalid.
       TokenTimedOutError: The token expired or does not exist.
     """
-    syntax_valid, time_valid = self.check_token_validity(token)
-    if not syntax_valid:
-      raise InvalidTokenError()
-    elif not time_valid:
-      raise TokenTimedOutError()
-
-    client_id = self.client_id_from_token(token)
+    client_id = self.validate_token_and_extract_client_id(token)
 
 
 
diff --git a/google/appengine/api/datastore.py b/google/appengine/api/datastore.py
index b26129b..9bc0cc6 100644
--- a/google/appengine/api/datastore.py
+++ b/google/appengine/api/datastore.py
@@ -364,35 +364,58 @@
 _ENV_KEY = '__DATASTORE_CONNECTION_INITIALIZED__'
 
 
+def __InitConnection():
+  """Internal method to make sure the connection state has been initialized."""
+
+
+
+
+
+
+
+
+
+
+
+
+  if os.getenv(_ENV_KEY) and hasattr(_thread_local, 'connection_stack'):
+    return
+  _thread_local.connection_stack = [datastore_rpc.Connection(adapter=_adapter)]
+
+  os.environ[_ENV_KEY] = '1'
+
+
 def _GetConnection():
-  """Retrieve a datastore connection local to the thread."""
-
-
-
-
-
-
-
-
-
-
-  connection = None
-  if os.getenv(_ENV_KEY):
-    try:
-      connection = _thread_local.connection
-    except AttributeError:
-      pass
-  if connection is None:
-    connection = datastore_rpc.Connection(adapter=_adapter)
-    _SetConnection(connection)
-  return connection
+  """Internal method to retrieve a datastore connection local to the thread."""
+  __InitConnection()
+  return _thread_local.connection_stack[-1]
 
 
 def _SetConnection(connection):
-  """Sets the datastore connection local to the thread."""
-  _thread_local.connection = connection
+  """Internal method to replace the current thread local connection."""
+  __InitConnection()
+  _thread_local.connection_stack[-1] = connection
 
-  os.environ[_ENV_KEY] = '1'
+
+def _PushConnection(new_connection):
+  """Internal method to save the current connection and sets a new one.
+
+  Args:
+    new_connection: The connection to set.
+  """
+  __InitConnection()
+  _thread_local.connection_stack.append(new_connection)
+
+
+def _PopConnection():
+  """Internal method to restores the previous connection.
+
+  Returns:
+    The connection that was current before the pop.
+  """
+
+  assert len(_thread_local.connection_stack) >= 2
+  return _thread_local.connection_stack.pop()
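+
+
+# A minimal sketch of the intended push/pop pairing (hypothetical names;
+# mirrors the INDEPENDENT-propagation and non-transactional paths below):
+#   saved = _PopConnection()      # step outside the current transaction
+#   try:
+#     do_non_transactional_work()
+#   finally:
+#     _PushConnection(saved)      # restore the saved connection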
 
 
 
@@ -2573,72 +2596,64 @@
     elif options.propagation is datastore_rpc.TransactionOptions.INDEPENDENT:
 
 
-      txn_connection = _GetConnection()
-      _SetConnection(_thread_local.old_connection)
+      txn_connection = _PopConnection()
       try:
         return RunInTransactionOptions(options, function, *args, **kwargs)
       finally:
-        _SetConnection(txn_connection)
+        _PushConnection(txn_connection)
     return function(*args, **kwargs)
 
   if options.propagation is datastore_rpc.TransactionOptions.MANDATORY:
-    raise datastore_errors.BadRequestError(
-      'Requires an existing transaction.')
+    raise datastore_errors.BadRequestError('Requires an existing transaction.')
 
 
   retries = options.retries
   if retries is None:
     retries = DEFAULT_TRANSACTION_RETRIES
 
-  _thread_local.old_connection = _GetConnection()
+  conn = _GetConnection()
+  _PushConnection(None)
+  try:
 
-  for _ in range(0, retries + 1):
-    new_connection = _thread_local.old_connection.new_transaction(options)
-    _SetConnection(new_connection)
-    try:
-      ok, result = _DoOneTry(new_connection, function, args, kwargs)
+    for _ in range(0, retries + 1):
+      _SetConnection(conn.new_transaction(options))
+      ok, result = _DoOneTry(function, args, kwargs)
       if ok:
         return result
-    finally:
-      _SetConnection(_thread_local.old_connection)
+  finally:
+    _PopConnection()
 
 
   raise datastore_errors.TransactionFailedError(
     'The transaction could not be committed. Please try again.')
 
 
-def _DoOneTry(new_connection, function, args, kwargs):
+def _DoOneTry(function, args, kwargs):
   """Helper to call a function in a transaction, once.
 
   Args:
-    new_connection: The new, transactional, connection object.
     function: The function to call.
     *args: Tuple of positional arguments.
     **kwargs: Dict of keyword arguments.
   """
-
   try:
     result = function(*args, **kwargs)
-
   except:
     original_exception = sys.exc_info()
-
     try:
-      new_connection.rollback()
+      _GetConnection().rollback()
     except Exception:
 
 
 
       logging.exception('Exception sending Rollback:')
-
     type, value, trace = original_exception
     if isinstance(value, datastore_errors.Rollback):
       return True, None
     else:
       raise type, value, trace
-
   else:
-    if new_connection.commit():
+    if _GetConnection().commit():
       return True, result
     else:
 
@@ -2739,12 +2754,12 @@
             'Function cannot be called from within a transaction.')
 
 
-      txn_connection = _GetConnection()
-      _SetConnection(_thread_local.old_connection)
+
+      txn_connection = _PopConnection()
       try:
         return func(*args, **kwds)
       finally:
-        _SetConnection(txn_connection)
+        _PushConnection(txn_connection)
     return inner_wrapper
   return outer_wrapper
 
diff --git a/google/appengine/api/images/images_not_implemented_stub.py b/google/appengine/api/images/images_not_implemented_stub.py
index 1865ef5..dc1fd86 100644
--- a/google/appengine/api/images/images_not_implemented_stub.py
+++ b/google/appengine/api/images/images_not_implemented_stub.py
@@ -21,13 +21,17 @@
 """A NotImplemented Images API stub for when the PIL library is not found."""
 
 
+from google.appengine.api import apiproxy_stub
 from google.appengine.api.images import images_blob_stub
 
+_SERVICE_NAME = "images"
 
-class ImagesNotImplementedServiceStub(object):
+
+class ImagesNotImplementedServiceStub(apiproxy_stub.APIProxyStub):
   """Stub version of images API which raises a NotImplementedError."""
 
   def __init__(self, host_prefix=""):
+    super(ImagesNotImplementedServiceStub, self).__init__(_SERVICE_NAME)
     self._blob_stub = images_blob_stub.ImagesBlobStub(host_prefix)
 
   def MakeSyncCall(self, service, call, request, response, request_id=None):
@@ -41,7 +45,7 @@
       request_id: A unique string identifying the request associated with the
           API call.
     """
-    if service == "images":
+    if service == _SERVICE_NAME:
       if call == "GetUrlBase":
         self._blob_stub.GetUrlBase(request, response)
         return
diff --git a/google/appengine/api/logservice/logservice.py b/google/appengine/api/logservice/logservice.py
index a3a8b78..56b9cb3 100644
--- a/google/appengine/api/logservice/logservice.py
+++ b/google/appengine/api/logservice/logservice.py
@@ -933,10 +933,12 @@
                                'used at the same time.')
 
   if version_ids is None and module_versions is None:
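+    # Default to the calling module/version: module_id is set only for
+    # non-default modules, and CURRENT_VERSION_ID ("<version>.<minor>") is
+    # trimmed to the major version name.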
+    module_version = request.add_module_version()
+    if os.environ['CURRENT_MODULE_ID'] != 'default':
 
-
-    version_id = os.environ['CURRENT_VERSION_ID']
-    request.add_module_version().set_version_id(version_id.split('.')[0])
+      module_version.set_module_id(os.environ['CURRENT_MODULE_ID'])
+    module_version.set_version_id(
+        os.environ['CURRENT_VERSION_ID'].split('.')[0])
 
   if module_versions:
     if not isinstance(module_versions, list):
diff --git a/google/appengine/api/memcache/__init__.py b/google/appengine/api/memcache/__init__.py
index 7641237..e20515d 100644
--- a/google/appengine/api/memcache/__init__.py
+++ b/google/appengine/api/memcache/__init__.py
@@ -326,11 +326,7 @@
                pload=None,
                pid=None,
                make_sync_call=None,
-               _app_id=None,
-               _num_memcacheg_backends=None,
-               _ignore_shardlock=None,
-               _memcache_pool_hint=None,
-               _memcache_sharding_strategy=None):
+               _app_id=None):
     """Create a new Client object.
 
     No parameters are required.
@@ -350,9 +346,6 @@
 
 
 
-
-
-
     self._pickler_factory = pickler
     self._unpickler_factory = unpickler
     self._pickle_protocol = pickleProtocol
@@ -417,9 +410,6 @@
       app_override = message.mutable_override()
       app_override.set_app_id(self._app_id)
 
-
-      app_override.set_num_memcacheg_backends(0)
-
   def set_servers(self, servers):
     """Sets the pool of memcache servers used by the client.
 
diff --git a/google/appengine/api/memcache/memcache_service_pb.py b/google/appengine/api/memcache/memcache_service_pb.py
index e5e2297..7559374 100644
--- a/google/appengine/api/memcache/memcache_service_pb.py
+++ b/google/appengine/api/memcache/memcache_service_pb.py
@@ -31,8 +31,6 @@
   _extension_runtime = False
   _ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
 
-from google.appengine.base.memcache_sharding_strategy_pb import *
-import google.appengine.base.memcache_sharding_strategy_pb
 class MemcacheServiceError(ProtocolBuffer.ProtocolMessage):
 
 
@@ -127,10 +125,9 @@
   has_memcache_pool_hint_ = 0
   memcache_pool_hint_ = ""
   has_memcache_sharding_strategy_ = 0
-  memcache_sharding_strategy_ = None
+  memcache_sharding_strategy_ = ""
 
   def __init__(self, contents=None):
-    self.lazy_init_lock_ = thread.allocate_lock()
     if contents is not None: self.MergeFromString(contents)
 
   def app_id(self): return self.app_id_
@@ -185,22 +182,16 @@
 
   def has_memcache_pool_hint(self): return self.has_memcache_pool_hint_
 
-  def memcache_sharding_strategy(self):
-    if self.memcache_sharding_strategy_ is None:
-      self.lazy_init_lock_.acquire()
-      try:
-        if self.memcache_sharding_strategy_ is None: self.memcache_sharding_strategy_ = Enum()
-      finally:
-        self.lazy_init_lock_.release()
-    return self.memcache_sharding_strategy_
+  def memcache_sharding_strategy(self): return self.memcache_sharding_strategy_
 
-  def mutable_memcache_sharding_strategy(self): self.has_memcache_sharding_strategy_ = 1; return self.memcache_sharding_strategy()
+  def set_memcache_sharding_strategy(self, x):
+    self.has_memcache_sharding_strategy_ = 1
+    self.memcache_sharding_strategy_ = x
 
   def clear_memcache_sharding_strategy(self):
-
     if self.has_memcache_sharding_strategy_:
-      self.has_memcache_sharding_strategy_ = 0;
-      if self.memcache_sharding_strategy_ is not None: self.memcache_sharding_strategy_.Clear()
+      self.has_memcache_sharding_strategy_ = 0
+      self.memcache_sharding_strategy_ = ""
 
   def has_memcache_sharding_strategy(self): return self.has_memcache_sharding_strategy_
 
@@ -211,7 +202,7 @@
     if (x.has_num_memcacheg_backends()): self.set_num_memcacheg_backends(x.num_memcacheg_backends())
     if (x.has_ignore_shardlock()): self.set_ignore_shardlock(x.ignore_shardlock())
     if (x.has_memcache_pool_hint()): self.set_memcache_pool_hint(x.memcache_pool_hint())
-    if (x.has_memcache_sharding_strategy()): self.mutable_memcache_sharding_strategy().MergeFrom(x.memcache_sharding_strategy())
+    if (x.has_memcache_sharding_strategy()): self.set_memcache_sharding_strategy(x.memcache_sharding_strategy())
 
   def Equals(self, x):
     if x is self: return 1
@@ -233,7 +224,6 @@
       initialized = 0
       if debug_strs is not None:
         debug_strs.append('Required field: app_id not set.')
-    if (self.has_memcache_sharding_strategy_ and not self.memcache_sharding_strategy_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
@@ -242,7 +232,7 @@
     if (self.has_num_memcacheg_backends_): n += 1 + self.lengthVarInt64(self.num_memcacheg_backends_)
     if (self.has_ignore_shardlock_): n += 2
     if (self.has_memcache_pool_hint_): n += 1 + self.lengthString(len(self.memcache_pool_hint_))
-    if (self.has_memcache_sharding_strategy_): n += 1 + self.lengthString(self.memcache_sharding_strategy_.ByteSize())
+    if (self.has_memcache_sharding_strategy_): n += 1 + self.lengthString(len(self.memcache_sharding_strategy_))
     return n + 1
 
   def ByteSizePartial(self):
@@ -253,7 +243,7 @@
     if (self.has_num_memcacheg_backends_): n += 1 + self.lengthVarInt64(self.num_memcacheg_backends_)
     if (self.has_ignore_shardlock_): n += 2
     if (self.has_memcache_pool_hint_): n += 1 + self.lengthString(len(self.memcache_pool_hint_))
-    if (self.has_memcache_sharding_strategy_): n += 1 + self.lengthString(self.memcache_sharding_strategy_.ByteSizePartial())
+    if (self.has_memcache_sharding_strategy_): n += 1 + self.lengthString(len(self.memcache_sharding_strategy_))
     return n
 
   def Clear(self):
@@ -277,8 +267,7 @@
       out.putPrefixedString(self.memcache_pool_hint_)
     if (self.has_memcache_sharding_strategy_):
       out.putVarInt32(42)
-      out.putVarInt32(self.memcache_sharding_strategy_.ByteSize())
-      self.memcache_sharding_strategy_.OutputUnchecked(out)
+      out.putPrefixedString(self.memcache_sharding_strategy_)
 
   def OutputPartial(self, out):
     if (self.has_app_id_):
@@ -295,8 +284,7 @@
       out.putPrefixedString(self.memcache_pool_hint_)
     if (self.has_memcache_sharding_strategy_):
       out.putVarInt32(42)
-      out.putVarInt32(self.memcache_sharding_strategy_.ByteSizePartial())
-      self.memcache_sharding_strategy_.OutputPartial(out)
+      out.putPrefixedString(self.memcache_sharding_strategy_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -314,10 +302,7 @@
         self.set_memcache_pool_hint(d.getPrefixedString())
         continue
       if tt == 42:
-        length = d.getVarInt32()
-        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
-        d.skip(length)
-        self.mutable_memcache_sharding_strategy().TryMerge(tmp)
+        self.set_memcache_sharding_strategy(d.getPrefixedString())
         continue
 
 
@@ -331,10 +316,7 @@
     if self.has_num_memcacheg_backends_: res+=prefix+("num_memcacheg_backends: %s\n" % self.DebugFormatInt32(self.num_memcacheg_backends_))
     if self.has_ignore_shardlock_: res+=prefix+("ignore_shardlock: %s\n" % self.DebugFormatBool(self.ignore_shardlock_))
     if self.has_memcache_pool_hint_: res+=prefix+("memcache_pool_hint: %s\n" % self.DebugFormatString(self.memcache_pool_hint_))
-    if self.has_memcache_sharding_strategy_:
-      res+=prefix+"memcache_sharding_strategy <\n"
-      res+=self.memcache_sharding_strategy_.__str__(prefix + "  ", printElemNumber)
-      res+=prefix+">\n"
+    if self.has_memcache_sharding_strategy_: res+=prefix+("memcache_sharding_strategy: %s\n" % self.DebugFormatString(self.memcache_sharding_strategy_))
     return res
 
 
diff --git a/google/appengine/api/modules/modules.py b/google/appengine/api/modules/modules.py
index 9eeff2c..343dbb9 100644
--- a/google/appengine/api/modules/modules.py
+++ b/google/appengine/api/modules/modules.py
@@ -96,12 +96,7 @@
   If this is version "v1" of module "module5" for app "my-app", this function
   will return "module5".
   """
-  module = _split_version_id(os.environ['CURRENT_VERSION_ID'])[0]
-  if not module:
-
-
-    return 'default'
-  return module
+  return os.environ['CURRENT_MODULE_ID']
 
 
 def get_current_version_name():
@@ -111,7 +106,7 @@
   will return "v1".
   """
 
-  return _split_version_id(os.environ['CURRENT_VERSION_ID'])[1]
+  return os.environ['CURRENT_VERSION_ID'].split('.')[0]
 
 
 def get_current_instance_id():
diff --git a/google/appengine/api/prospective_search/prospective_search_stub.py b/google/appengine/api/prospective_search/prospective_search_stub.py
index 4ebafdc..d887b0a 100644
--- a/google/appengine/api/prospective_search/prospective_search_stub.py
+++ b/google/appengine/api/prospective_search/prospective_search_stub.py
@@ -403,7 +403,7 @@
       elif op is QueryParser.GE:
         for val in field_vals:
           if val >= query_num: return True
-      elif op is QueryParser.LT:
+      elif op is QueryParser.LESSTHAN:
         for val in field_vals:
           if val < query_num: return True
       elif op is QueryParser.LE:
diff --git a/google/appengine/api/remote_socket/_remote_socket.py b/google/appengine/api/remote_socket/_remote_socket.py
index 27fcc3a..ecc5e8b 100644
--- a/google/appengine/api/remote_socket/_remote_socket.py
+++ b/google/appengine/api/remote_socket/_remote_socket.py
@@ -36,6 +36,7 @@
 
 
 
+
 import errno
 import os
 import re
diff --git a/google/appengine/api/remote_socket/_remote_socket.py.orig b/google/appengine/api/remote_socket/_remote_socket.py.orig
deleted file mode 100644
index ecc5e8b..0000000
--- a/google/appengine/api/remote_socket/_remote_socket.py.orig
+++ /dev/null
@@ -1,1180 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2007 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-
-"""Socket Module.
-
-This file is intended to provide the equivalent of
-python/Modules/socketmodule.c rather than python/Lib/socket.py which amongst
-other things adds a buffered file-like interface.
-"""
-
-
-
-
-
-
-
-
-
-
-
-
-
-import errno
-import os
-import re
-import struct
-import time
-import weakref
-
-from google.appengine.api import apiproxy_stub_map
-from google.appengine.api.remote_socket import remote_socket_service_pb
-
-from google.appengine.api.remote_socket._remote_socket_addr import *
-
-from google.appengine.api.remote_socket._remote_socket_error import *
-from google.appengine.runtime import apiproxy_errors
-
-has_ipv6 = True
-
-SOCK_STREAM = 1
-SOCK_DGRAM = 2
-
-SOMAXCONN = 128
-
-MSG_PEEK = 2
-MSG_WAITALL = 256
-
-IPPROTO_IP = 0
-IPPROTO_ICMP = 1
-IPPROTO_TCP = 6
-IPPROTO_UDP = 17
-
-IPPORT_RESERVED = 1024
-IPPORT_USERRESERVED = 5000
-
-INADDR_ANY = 0x00000000
-INADDR_BROADCAST = 0xffffffff
-INADDR_LOOPBACK = 0x7f000001
-INADDR_NONE = 0xffffffff
-
-(AI_PASSIVE, AI_CANONNAME, AI_NUMERICHOST, AI_NUMERICSERV, AI_V4MAPPED, AI_ALL,
- AI_ADDRCONFIG) = map(lambda x: 1 << x, range(7))
-
-RemoteSocketServiceError = remote_socket_service_pb.RemoteSocketServiceError
-
-
-def _ImportSymbols(protobuf, symbols, prefix='SOCKET_'):
-  """Import symbols defined in a protobuf into the global namespace."""
-  for sym in symbols:
-    globals()[sym] = getattr(protobuf, prefix + sym)
-
-
-_ImportSymbols(remote_socket_service_pb.ResolveReply, (
-    'EAI_ADDRFAMILY', 'EAI_AGAIN', 'EAI_BADFLAGS', 'EAI_FAIL', 'EAI_FAMILY',
-    'EAI_MEMORY', 'EAI_NODATA', 'EAI_NONAME', 'EAI_SERVICE', 'EAI_SOCKTYPE',
-    'EAI_SYSTEM', 'EAI_BADHINTS', 'EAI_PROTOCOL', 'EAI_OVERFLOW', 'EAI_MAX'))
-
-_ImportSymbols(remote_socket_service_pb.ShutDownRequest, (
-    'SHUT_RD', 'SHUT_WR', 'SHUT_RDWR'))
-
-_ImportSymbols(remote_socket_service_pb.SocketOption, (
-
-    'SOL_SOCKET', 'SOL_IP', 'SOL_TCP', 'SOL_UDP',
-
-    'SO_DEBUG', 'SO_REUSEADDR', 'SO_TYPE', 'SO_ERROR', 'SO_DONTROUTE',
-    'SO_BROADCAST', 'SO_SNDBUF', 'SO_RCVBUF', 'SO_KEEPALIVE',
-
-    'IP_TOS', 'IP_TTL', 'IP_HDRINCL', 'IP_OPTIONS',
-
-    'TCP_NODELAY', 'TCP_MAXSEG', 'TCP_CORK', 'TCP_KEEPIDLE', 'TCP_KEEPINTVL',
-    'TCP_KEEPCNT', 'TCP_SYNCNT', 'TCP_LINGER2', 'TCP_DEFER_ACCEPT',
-    'TCP_WINDOW_CLAMP', 'TCP_INFO', 'TCP_QUICKACK'))
-
-_ImportSymbols(remote_socket_service_pb.PollEvent, (
-    'POLLNONE', 'POLLIN', 'POLLPRI', 'POLLOUT', 'POLLERR', 'POLLHUP',
-    'POLLNVAL', 'POLLRDNORM', 'POLLRDBAND', 'POLLWRNORM', 'POLLWRBAND',
-    'POLLMSG', 'POLLREMOVE', 'POLLRDHUP'))
-
-
-_GLOBAL_DEFAULT_TIMEOUT = object()
-_GLOBAL_TIMEOUT_VALUE = -1.0
-
-_GLOBAL_SOCKET_NEXT_FILENO = 2**32
-_GLOBAL_SOCKET_MAP = weakref.WeakValueDictionary()
-
-_SERVICES = {
-    'ftp': [('tcp', 21), ('udp', 21)],
-    'ftp-data': [('tcp', 20), ('udp', 20)],
-    'http': [('tcp', 80), ('udp', 80)],
-    'pop3': [('tcp', 110), ('udp', 110)],
-    'pop3s': [('tcp', 995), ('udp', 995)],
-    'smtp': [('tcp', 25), ('udp', 25)],
-    'telnet': [('tcp', 23), ('udp', 23)],
-    'www': [('tcp', 80), ('udp', 80)],
-    'www-http': [('tcp', 80), ('udp', 80)],
-}
-
-_ERROR_MAP = {
-    RemoteSocketServiceError.PERMISSION_DENIED: errno.EACCES,
-    RemoteSocketServiceError.INVALID_REQUEST: errno.EINVAL,
-    RemoteSocketServiceError.SOCKET_CLOSED: errno.EPIPE,
-}
-
-_SOCK_PROTO_MAP = {
-    (SOCK_STREAM, IPPROTO_TCP): 'tcp',
-    (SOCK_DGRAM, IPPROTO_UDP): 'udp',
-}
-
-_ADDRESS_FAMILY_MAP = {
-    AF_INET: remote_socket_service_pb.CreateSocketRequest.IPv4,
-    AF_INET6: remote_socket_service_pb.CreateSocketRequest.IPv6,
-}
-
-_ADDRESS_FAMILY_LENGTH_MAP = {
-    4: AF_INET,
-    16: AF_INET6,
-}
-
-
-class SocketApiNotImplementedError(NotImplementedError, error):
-  pass
-
-
-def _SystemExceptionFromAppError(e):
-  app_error = e.application_error
-  if app_error in (RemoteSocketServiceError.SYSTEM_ERROR,
-                   RemoteSocketServiceError.GAI_ERROR):
-    error_detail = RemoteSocketServiceError()
-    try:
-      error_detail.ParseASCII(e.error_detail)
-    except NotImplementedError:
-
-
-      m = re.match(
-          r'system_error:\s*(-?\d+)\s*,?\s*error_detail:\s*"([^"]*)"\s*',
-          e.error_detail)
-      if m:
-        error_detail.set_system_error(int(m.group(1)))
-        error_detail.set_error_detail(m.group(2))
-      else:
-        error_detail.set_system_error(-1)
-        error_detail.set_error_detail(e.error_detail)
-    if app_error == RemoteSocketServiceError.SYSTEM_ERROR:
-      return error(error_detail.system_error(),
-                   (error_detail.error_detail() or
-                    os.strerror(error_detail.system_error())))
-    elif app_error == RemoteSocketServiceError.GAI_ERROR:
-      return gaierror(error_detail.system_error(),
-                      error_detail.error_detail())
-  elif app_error in _ERROR_MAP:
-    return error(_ERROR_MAP[app_error], os.strerror(_ERROR_MAP[app_error]))
-  else:
-    return e
-
-
-def _IsAddr(family, addr):
-  try:
-    inet_pton(family, addr)
-  except Exception:
-    return False
-  return True
-
-
-
-
-def _Resolve(name, families, use_dns=True, canonical=False):
-  for family in families:
-    if _IsAddr(family, name):
-
-      return (name, [], [name])
-
-
-  if use_dns:
-    canon, aliases, addresses = _ResolveName(name, families)
-    if addresses:
-      return (canon, aliases, addresses)
-
-  raise gaierror(EAI_NONAME, 'nodename nor servname provided, or not known')
-
-
-
-
-def _ResolveName(name, address_families=(AF_INET6, AF_INET)):
-  request = remote_socket_service_pb.ResolveRequest()
-  request.set_name(name)
-  for af in address_families:
-    request.add_address_families(_ADDRESS_FAMILY_MAP[af])
-
-  reply = remote_socket_service_pb.ResolveReply()
-
-  try:
-    apiproxy_stub_map.MakeSyncCall('remote_socket', 'Resolve', request, reply)
-  except apiproxy_errors.ApplicationError, e:
-    raise _SystemExceptionFromAppError(e)
-
-  canonical_name = reply.canonical_name()
-  aliases = reply.aliases_list()
-  addresses = [inet_ntop(_ADDRESS_FAMILY_LENGTH_MAP[len(a)], a)
-               for a in reply.packed_address_list()]
-  return canonical_name, aliases, addresses
-
-
-def _ResolveService(servicename, protocolname, numeric_only=False):
-  try:
-    return (protocolname, int(servicename))
-  except ValueError:
-    pass
-
-  if not numeric_only:
-    for protocol, port in _SERVICES.get(servicename, []):
-      if not protocolname or protocol == protocolname:
-        return (protocol, port)
-
-  raise gaierror(EAI_SERVICE, '')
-
-
-def gethostbyname(host):
-  """gethostbyname(host) -> address
-
-  Return the IP address (a string of the form '255.255.255.255') for a host.
-  """
-  return _Resolve(host, [AF_INET])[2][0]
-
-
-def gethostbyname_ex(host):
-  """gethostbyname_ex(host) -> (name, aliaslist, addresslist)
-
-  Return the true host name, a list of aliases, and a list of IP addresses,
-  for a host.  The host argument is a string giving a host name or IP number.
-  """
-  return _Resolve(host, [AF_INET])
-
-
-def gethostbyaddr(addr):
-
-  raise SocketApiNotImplementedError()
-
-
-def gethostname():
-  """gethostname() -> string
-
-  Return the current host name.
-  """
-  return os.environ.get('HTTP_HOST', 'www.appspot.com')
-
-
-def getprotobyname(protocolname):
-  raise SocketApiNotImplementedError()
-
-
-def getservbyname(servicename, protocolname=None):
-  """getservbyname(servicename[, protocolname]) -> integer
-
-  Return a port number from a service name and protocol name.
-  The optional protocol name, if given, should be 'tcp' or 'udp',
-  otherwise any protocol will match.
-  """
-  return _ResolveService(servicename, protocolname)[1]
-
-
-def getservbyport(portnumber, protocolname=0):
-  raise SocketApiNotImplementedError()
-
-
-
-
-def getaddrinfo(host, service, family=AF_UNSPEC, socktype=0, proto=0, flags=0):
-  """getaddrinfo(host, port [, family, socktype, proto, flags])
-      -> list of (family, socktype, proto, canonname, sockaddr)
-
-  Resolve host and port into addrinfo struct.
-  """
-  if isinstance(host, unicode):
-    host = host.encode('idna')
-  if host == '*':
-    host = ''
-  if service == '*':
-    service = ''
-  if not host and not service:
-    raise gaierror(EAI_NONAME, 'nodename nor servname provided, or not known')
-
-  families = [f for f in _ADDRESS_FAMILY_MAP.keys()
-              if family in (AF_UNSPEC, f)]
-  if not families:
-    raise gaierror(EAI_FAMILY, 'ai_family not supported')
-
-  sock_proto = [sp for sp in _SOCK_PROTO_MAP.keys()
-                if socktype in (0, sp[0]) and proto in (0, sp[1])]
-  if not sock_proto:
-    raise gaierror(EAI_BADHINTS, 'Bad hints')
-
-  canon = ''
-  sock_proto_port = []
-  family_addresses = []
-
-
-  if host:
-    canon, _, addresses = _Resolve(
-        host, families,
-        use_dns=~(flags & AI_NUMERICHOST),
-        canonical=(flags & AI_CANONNAME))
-    family_addresses = [(f, a)
-                        for f in families
-                        for a in addresses if _IsAddr(f, a)]
-  else:
-    if flags & AI_PASSIVE:
-      canon = 'anyaddr'
-      if AF_INET6 in families:
-        family_addresses.append((AF_INET6, '::'))
-      if AF_INET in families:
-        family_addresses.append((AF_INET, '0.0.0.0'))
-    else:
-      canon = 'localhost'
-      if AF_INET6 in families:
-        family_addresses.append((AF_INET6, '::1'))
-      if AF_INET in families:
-        family_addresses.append((AF_INET, '127.0.0.1'))
-
-
-  if service:
-    sock_proto_port = [
-        sp + (_ResolveService(service, _SOCK_PROTO_MAP[sp],
-                              flags & AI_NUMERICSERV)[1],)
-        for sp in sock_proto]
-  else:
-    sock_proto_port = [sp + (0,) for sp in sock_proto]
-
-  return [(fa[0], spp[0], spp[1], canon, (fa[1], spp[2]))
-          for fa in family_addresses
-          for spp in sock_proto_port]
-
-
-def getnameinfo():
-
-  raise SocketApiNotImplementedError()
-
-
-def getdefaulttimeout():
-  """getdefaulttimeout() -> timeout
-
-  Returns the default timeout in floating seconds for new socket objects.
-  A value of None indicates that new socket objects have no timeout.
-  When the socket module is first imported, the default is None.
-  """
-
-  if _GLOBAL_TIMEOUT_VALUE < 0.0:
-    return None
-  return _GLOBAL_TIMEOUT_VALUE
-
-
-
-
-def setdefaulttimeout(timeout):
-  """setdefaulttimeout(timeout)
-
-  Set the default timeout in floating seconds for new socket objects.
-  A value of None indicates that new socket objects have no timeout.
-  When the socket module is first imported, the default is None.
-  """
-
-  if timeout is None:
-    timeout = -1.0
-  else:
-    try:
-      timeout = 0.0 + timeout
-    except TypeError:
-      raise TypeError('a float is required')
-    if timeout < 0.0:
-      raise ValueError('Timeout value out of range')
-
-
-  global _GLOBAL_TIMEOUT_VALUE
-  _GLOBAL_TIMEOUT_VALUE = timeout
-
-
-def _GetSocket(value):
-  if isinstance(value, (int, long)):
-    fileno = value
-  else:
-    try:
-      fileno = value.fileno()
-    except AttributeError:
-      raise TypeError('argument must be an int, or have a fileno() method.')
-  try:
-    return _GLOBAL_SOCKET_MAP[fileno]
-  except KeyError:
-    raise ValueError('select only supported on socket objects.')
-
-
-
-
-
-
-def select(rlist, wlist, xlist, timeout=None):
-  """select(rlist, wlist, xlist[, timeout]) -> (rlist, wlist, xlist)
-
-  Wait until one or more file descriptors are ready for some kind of I/O.
-  The first three arguments are sequences of file descriptors to be waited for:
-  rlist -- wait until ready for reading
-  wlist -- wait until ready for writing
-  xlist -- wait for an ``exceptional condition''
-  If only one kind of condition is required, pass [] for the other lists.
-  A file descriptor is either a socket or file object, or a small integer
-  gotten from a fileno() method call on one of those.
-
-  The optional 4th argument specifies a timeout in seconds; it may be
-  a floating point number to specify fractions of seconds.  If it is absent
-  or None, the call will never time out.
-
-  The return value is a tuple of three lists corresponding to the first three
-  arguments; each contains the subset of the corresponding file descriptors
-  that are ready.
-  """
-  if not rlist and not wlist and not xlist:
-    if timeout:
-      time.sleep(timeout)
-    return ([], [], [])
-
-  state_map = {}
-  rlist_out, wlist_out, xlist_out = [], [], []
-
-  def _SetState(request, sock, event):
-    socket_descriptor = sock._SocketDescriptor()
-    state = state_map.setdefault(socket_descriptor, { 'observed_events': 0, })
-
-    if ((event == POLLIN and sock._shutdown_read) or
-        (event == POLLOUT and sock._shutdown_write)):
-      state['observed_events'] |= event
-      request.set_timeout_seconds(0.0)
-      return
-
-    poll_event = state.get('poll_event')
-    if not poll_event:
-      poll_event = request.add_events()
-      poll_event.set_socket_descriptor(socket_descriptor)
-      poll_event.set_observed_events(0)
-      state['poll_event'] = poll_event
-    poll_event.set_requested_events(poll_event.requested_events()|event)
-
-  request = remote_socket_service_pb.PollRequest()
-  if timeout is not None:
-    request.set_timeout_seconds(timeout)
-
-  for value in rlist:
-    _SetState(request, _GetSocket(value), POLLIN)
-  for value in wlist:
-    _SetState(request, _GetSocket(value), POLLOUT)
-
-  if request.events_size():
-    reply = remote_socket_service_pb.PollReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall('remote_socket', 'Poll', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      raise _SystemExceptionFromAppError(e)
-
-    for event in reply.events_list():
-      state_map[event.socket_descriptor()][
-          'observed_events'] |= event.observed_events()
-
-  for value in rlist:
-    state = state_map[_GetSocket(value)._SocketDescriptor()]
-    if state['observed_events'] & POLLIN:
-      rlist_out.append(value)
-  for value in wlist:
-    state = state_map[_GetSocket(value)._SocketDescriptor()]
-    if state['observed_events'] & POLLOUT:
-      wlist_out.append(value)
-
-  return (rlist_out, wlist_out, xlist_out)
-
-
-class socket(object):
-  """socket([family[, type[, proto]]]) -> socket object
-
-  Open a socket of the given type.  The family argument specifies the
-  address family; it defaults to AF_INET.  The type argument specifies
-  whether this is a stream (SOCK_STREAM, this is the default)
-  or datagram (SOCK_DGRAM) socket.  The protocol argument defaults to 0,
-  specifying the default protocol.  Keyword arguments are accepted.
-
-  A socket object represents one endpoint of a network connection.
-  """
-
-  def __del__(self):
-    if not self._serialized:
-      self.close()
-
-  def __getstate__(self):
-    self._serialized = True
-    return self.__dict__
-
-
-
-  def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _create=False):
-    if family not in (AF_INET, AF_INET6):
-      raise error(errno.EAFNOSUPPORT, os.strerror(errno.EAFNOSUPPORT))
-
-    if type not in (SOCK_STREAM, SOCK_DGRAM):
-      raise error(errno.EPROTONOSUPPORT, os.strerror(errno.EPROTONOSUPPORT))
-
-    if proto:
-      if ((proto not in (IPPROTO_TCP, IPPROTO_UDP)) or
-          (proto == IPPROTO_TCP and type != SOCK_STREAM) or
-          (proto == IPPROTO_UDP and type != SOCK_DGRAM)):
-        raise error(errno.EPROTONOSUPPORT, os.strerror(errno.EPROTONOSUPPORT))
-
-    self.family = family
-    self.type = type
-    self.proto = proto
-    self._created = False
-    self._fileno = None
-    self._serialized = False
-    self.settimeout(getdefaulttimeout())
-    self._Clear()
-
-    if _create:
-      self._CreateSocket()
-
-  def _Clear(self):
-    self._socket_descriptor = None
-    self._bound = False
-    self._listen = False
-    self._connected = False
-    self._connect_in_progress = False
-    self._shutdown_read = False
-    self._shutdown_write = False
-    self._setsockopt = []
-    self._stream_offset = 0
-
-  def _CreateSocket(self, address=None, bind_address=None,
-                    address_hostname_hint=None):
-    assert not self._created
-    self._created = True
-
-    request = remote_socket_service_pb.CreateSocketRequest()
-
-    if self.family == AF_INET:
-      request.set_family(remote_socket_service_pb.CreateSocketRequest.IPv4)
-    elif self.family == AF_INET6:
-      request.set_family(remote_socket_service_pb.CreateSocketRequest.IPv6)
-
-    if self.type == SOCK_STREAM:
-      request.set_protocol(remote_socket_service_pb.CreateSocketRequest.TCP)
-    elif self.type == SOCK_DGRAM:
-      request.set_protocol(remote_socket_service_pb.CreateSocketRequest.UDP)
-
-    if address:
-      assert self.gettimeout() is None, (
-          'Non-blocking connect not supported by CreateSocket')
-      self._SetProtoFromAddr(request.mutable_remote_ip(), address,
-                             address_hostname_hint)
-
-    if bind_address:
-      self._SetProtoFromAddr(request.mutable_proxy_external_ip(), bind_address)
-
-    for level, option, value in self._setsockopt:
-      o = request.add_socket_options()
-      o.set_level(level)
-      o.set_option(option)
-      if isinstance(value, (int, long)):
-        o.set_value(struct.pack('=L', value))
-      else:
-        o.set_value(value)
-    self._setsockopt = []
-
-    reply = remote_socket_service_pb.CreateSocketReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall(
-          'remote_socket', 'CreateSocket', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      raise _SystemExceptionFromAppError(e)
-
-    self._socket_descriptor = reply.socket_descriptor()
-    if bind_address:
-      self._bound = True
-    if address:
-      self._bound = True
-      self._connected = True
-
-  def _GetPackedAddr(self, addr):
-    if addr == '<broadcast>':
-      if self.family == AF_INET6:
-        return '\xff' * 16
-      else:
-        return '\xff' * 4
-    for res in getaddrinfo(addr, '0',
-                           self.family, self.type, self.proto,
-                           AI_NUMERICSERV|AI_PASSIVE):
-      return inet_pton(self.family, res[4][0])
-
-  def _SetProtoFromAddr(self, proto, address, hostname_hint=None):
-    address, port = address
-    proto.set_packed_address(self._GetPackedAddr(address))
-    proto.set_port(port)
-    proto.set_hostname_hint(hostname_hint or address)
-
-  def fileno(self):
-    """fileno() -> integer
-
-    Return the integer file descriptor of the socket.
-    """
-    global _GLOBAL_SOCKET_MAP
-    global _GLOBAL_SOCKET_NEXT_FILENO
-    if self._fileno is None:
-      self._fileno = _GLOBAL_SOCKET_NEXT_FILENO
-      _GLOBAL_SOCKET_NEXT_FILENO += 1
-      _GLOBAL_SOCKET_MAP[self._fileno] = self
-    assert _GLOBAL_SOCKET_MAP.get(self._fileno) == self, (
-        "fileno mismatch in _GLOBAL_SOCKET_MAP")
-    return self._fileno
-
-  def bind(self, address):
-    """bind(address)
-
-    Bind the socket to a local address.  For IP sockets, the address is a
-    pair (host, port); the host must refer to the local host. For raw packet
-    sockets the address is a tuple (ifname, proto [,pkttype [,hatype]])
-    """
-    if not self._created:
-      self._CreateSocket(bind_address=address)
-      return
-    if not self._socket_descriptor:
-      raise error(errno.EBADF, os.strerror(errno.EBADF))
-    if self._bound:
-      raise error(errno.EINVAL, os.strerror(errno.EINVAL))
-
-    request = remote_socket_service_pb.BindRequest()
-    request.set_socket_descriptor(self._socket_descriptor)
-    self._SetProtoFromAddr(request.mutable_proxy_external_ip(), address)
-
-    reply = remote_socket_service_pb.BindReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall('remote_socket', 'Bind', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      raise _SystemExceptionFromAppError(e)
-
-  def listen(self, backlog):
-    """listen(backlog)
-
-    Enable a server to accept connections.  The backlog argument must be at
-    least 1; it specifies the number of unaccepted connection that the system
-    will allow before refusing new connections.
-    """
-    if not self._created:
-      self._CreateSocket(bind_address=('', 0))
-    if not self._socket_descriptor:
-      raise error(errno.EBADF, os.strerror(errno.EBADF))
-    if self._connected:
-      raise error(errno.EINVAL, os.strerror(errno.EINVAL))
-    if self.type != SOCK_STREAM:
-      raise error(errno.EOPNOTSUPP, os.strerror(errno.EOPNOTSUPP))
-    self._bound = True
-    self._listen = True
-
-    request = remote_socket_service_pb.ListenRequest()
-    request.set_socket_descriptor(self._socket_descriptor)
-    request.set_backlog(backlog)
-
-    reply = remote_socket_service_pb.ListenReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall('remote_socket', 'Listen', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      raise _SystemExceptionFromAppError(e)
-
-  def accept(self):
-    """accept() -> (socket object, address info)
-
-    Wait for an incoming connection.  Return a new socket representing the
-    connection, and the address of the client.  For IP sockets, the address
-    info is a pair (hostaddr, port).
-    """
-    if not self._created:
-      self._CreateSocket()
-    if not self._socket_descriptor:
-      raise error(errno.EBADF, os.strerror(errno.EBADF))
-    if not self._listen:
-      raise error(errno.EINVAL, os.strerror(errno.EINVAL))
-
-    request = remote_socket_service_pb.AcceptRequest()
-    request.set_socket_descriptor(self._socket_descriptor)
-    if self.gettimeout() is not None:
-      request.set_timeout_seconds(self.gettimeout())
-
-    reply = remote_socket_service_pb.AcceptReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall('remote_socket', 'Accept', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      raise _SystemExceptionFromAppError(e)
-
-    ret = socket(self.family, self.type, self.proto)
-    ret._socket_descriptor = reply.new_socket_descriptor()
-    ret._created = True
-    ret._bound = True
-    ret._connected = True
-    return ret
-
-
-
-
-  def connect(self, address, _hostname_hint=None):
-    """connect(address)
-
-    Connect the socket to a remote address.  For IP sockets, the address
-    is a pair (host, port).
-    """
-    if not self._created:
-      if self.gettimeout() is None:
-        self._CreateSocket(address=address,
-                           address_hostname_hint=_hostname_hint)
-        return
-      else:
-
-
-
-
-        self._CreateSocket()
-    if not self._socket_descriptor:
-      raise error(errno.EBADF, os.strerror(errno.EBADF))
-    if self._connected:
-      raise error(errno.EISCONN, os.strerror(errno.EISCONN))
-
-    request = remote_socket_service_pb.ConnectRequest()
-    request.set_socket_descriptor(self._socket_descriptor)
-    self._SetProtoFromAddr(request.mutable_remote_ip(), address, _hostname_hint)
-    if self.gettimeout() is not None:
-      request.set_timeout_seconds(self.gettimeout())
-
-    reply = remote_socket_service_pb.ConnectReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall('remote_socket', 'Connect', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      translated_e = _SystemExceptionFromAppError(e)
-      if translated_e.errno == errno.EISCONN:
-        self._bound = True
-        self._connected = True
-      elif translated_e.errno == errno.EINPROGRESS:
-        self._connect_in_progress = True
-      raise translated_e
-
-    self._bound = True
-    self._connected = True
-
-  def connect_ex(self, address):
-    """connect_ex(address) -> errno
-
-    This is like connect(address), but returns an error code (the errno value)
-    instead of raising an exception when an error occurs.
-    """
-    try:
-      self.connect(address)
-    except error, e:
-      return e.errno
-    return 0
-
-  def getpeername(self):
-    """getpeername() -> address info
-
-    Return the address of the remote endpoint.  For IP sockets, the address
-    info is a pair (hostaddr, port).
-    """
-    if not self._created:
-      self._CreateSocket()
-    if not self._socket_descriptor:
-      raise error(errno.EBADF, os.strerror(errno.EBADF))
-    if not (self._connected or self._connect_in_progress):
-      raise error(errno.ENOTCONN, os.strerror(errno.ENOTCONN))
-
-    request = remote_socket_service_pb.GetPeerNameRequest()
-    request.set_socket_descriptor(self._socket_descriptor)
-
-    reply = remote_socket_service_pb.GetPeerNameReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall(
-          'remote_socket', 'GetPeerName', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      raise _SystemExceptionFromAppError(e)
-
-    if self._connect_in_progress:
-      self._connect_in_progress = False
-      self._connected = True
-
-    return (
-        inet_ntop(self.family, reply.peer_ip().packed_address()),
-        reply.peer_ip().port())
-
-  def getsockname(self):
-    """getsockname() -> address info
-
-    Return the address of the local endpoint.  For IP sockets, the address
-    info is a pair (hostaddr, port).
-    """
-    if not self._created:
-      self._CreateSocket()
-    if not self._socket_descriptor:
-      raise error(errno.EBADF, os.strerror(errno.EBADF))
-
-    request = remote_socket_service_pb.GetSocketNameRequest()
-    request.set_socket_descriptor(self._socket_descriptor)
-
-    reply = remote_socket_service_pb.GetSocketNameReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall(
-          'remote_socket', 'GetSocketName', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      raise _SystemExceptionFromAppError(e)
-
-    return (
-        inet_ntop(self.family, reply.proxy_external_ip().packed_address()),
-        reply.proxy_external_ip().port())
-
-  def recv(self, buffersize, flags=0):
-    """recv(buffersize[, flags]) -> data
-
-    Receive up to buffersize bytes from the socket.  For the optional flags
-    argument, see the Unix manual.  When no data is available, block until
-    at least one byte is available or until the remote end is closed.  When
-    the remote end is closed and all data is read, return the empty string.
-    """
-    return self.recvfrom(buffersize, flags)[0]
-
-  def recv_into(self, buf, nbytes=0, flags=0):
-    """recv_into(buffer, [nbytes[, flags]]) -> nbytes_read
-
-    A version of recv() that stores its data into a buffer rather than
-    creating a new string.  Receive up to buffersize bytes from the socket.
-    If buffersize is not specified (or 0), receive up to the size available
-    in the given buffer.
-
-    See recv() for documentation about the flags.
-    """
-    raise SocketApiNotImplementedError()
-
-  def recvfrom(self, buffersize, flags=0):
-    """recvfrom(buffersize[, flags]) -> (data, address info)
-
-    Like recv(buffersize, flags) but also return the sender's address info.
-    """
-    if not self._created:
-      self._CreateSocket()
-    if not self._socket_descriptor:
-      raise error(errno.EBADF, os.strerror(errno.EBADF))
-
-    request = remote_socket_service_pb.ReceiveRequest()
-    request.set_socket_descriptor(self._socket_descriptor)
-    request.set_data_size(buffersize)
-    request.set_flags(flags)
-    if self.type == SOCK_STREAM:
-      if not (self._connected or self._connect_in_progress):
-        raise error(errno.ENOTCONN, os.strerror(errno.ENOTCONN))
-    if self._shutdown_read:
-      request.set_timeout_seconds(0.0)
-    elif self.gettimeout() is not None:
-      request.set_timeout_seconds(self.gettimeout())
-
-    reply = remote_socket_service_pb.ReceiveReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall('remote_socket', 'Receive', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      e = _SystemExceptionFromAppError(e)
-      if not self._shutdown_read or e.errno != errno.EAGAIN:
-        raise e
-
-    if self._connect_in_progress:
-      self._connect_in_progress = False
-      self._connected = True
-
-    address = None
-    if reply.has_received_from():
-      address = (
-          inet_ntop(self.family, reply.received_from().packed_address()),
-          reply.received_from().port())
-
-    return reply.data(), address
-
-
-
-  def recvfrom_into(self, buffer, nbytes=0, flags=0):
-    """recvfrom_into(buffer[, nbytes[, flags]]) -> (nbytes, address info)
-
-    Like recv_into(buffer[, nbytes[, flags]]) but also return the
-    sender's address info.
-    """
-
-    raise SocketApiNotImplementedError()
-
-  def send(self, data, flags=0):
-    """send(data[, flags]) -> count
-
-    Send a data string to the socket.  For the optional flags
-    argument, see the Unix manual.  Return the number of bytes
-    sent; this may be less than len(data) if the network is busy.
-    """
-    return self.sendto(data, flags, None)
-
-  def sendall(self, data, flags=0):
-    """sendall(data[, flags])
-
-    Send a data string to the socket.  For the optional flags
-    argument, see the Unix manual.  This calls send() repeatedly
-    until all data is sent.  If an error occurs, it's impossible
-    to tell how much data has been sent.
-    """
-    offset = 0
-    while offset < len(data):
-      offset += self.sendto(data[offset:], flags, None)
-
-  def sendto(self, data, *args):
-    """sendto(data[, flags], address) -> count
-
-    Like send(data, flags) but allows specifying the destination address.
-    For IP sockets, the address is a pair (hostaddr, port).
-    """
-    if len(args) == 1:
-      flags, address = 0, args[0]
-    elif len(args) == 2:
-      flags, address = args
-
-    if not self._created:
-      self._CreateSocket()
-    if not self._socket_descriptor:
-      raise error(errno.EBADF, os.strerror(errno.EBADF))
-    if self._shutdown_write:
-      raise error(errno.EPIPE, os.strerror(errno.EPIPE))
-
-    request = remote_socket_service_pb.SendRequest()
-    request.set_socket_descriptor(self._socket_descriptor)
-
-    if len(data) > 512*1024:
-      request.set_data(data[:512*1024])
-    else:
-      request.set_data(data)
-    request.set_flags(flags)
-    request.set_stream_offset(self._stream_offset)
-
-    if address:
-      if self._connected:
-        raise error(errno.EISCONN, os.strerror(errno.EISCONN))
-      if self.type != SOCK_DGRAM:
-        raise error(errno.ENOTCONN, os.strerror(errno.ENOTCONN))
-      self._SetProtoFromAddr(request.mutable_send_to(), address)
-    else:
-      if not (self._connected or self._connect_in_progress):
-        raise error(errno.ENOTCONN, os.strerror(errno.ENOTCONN))
-
-    if self.gettimeout() is not None:
-      request.set_timeout_seconds(self.gettimeout())
-
-    reply = remote_socket_service_pb.SendReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall('remote_socket', 'Send', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      raise _SystemExceptionFromAppError(e)
-
-    if self._connect_in_progress:
-      self._connect_in_progress = False
-      self._connected = True
-
-    nbytes = reply.data_sent()
-    assert nbytes >= 0
-    if self.type == SOCK_STREAM:
-      self._stream_offset += nbytes
-    return nbytes
-
-  def setblocking(self, block):
-    """setblocking(flag)
-
-    Set the socket to blocking (flag is true) or non-blocking (false).
-    setblocking(True) is equivalent to settimeout(None);
-    setblocking(False) is equivalent to settimeout(0.0).
-    """
-    if block:
-      self._timeout = -1.0
-    else:
-      self._timeout = 0.0
-
-  def settimeout(self, timeout):
-    """settimeout(timeout)
-
-    Set a timeout on socket operations.  'timeout' can be a float,
-    giving in seconds, or None.  Setting a timeout of None disables
-    the timeout feature and is equivalent to setblocking(1).
-    Setting a timeout of zero is the same as setblocking(0).
-    """
-    if timeout is None:
-      self._timeout = -1.0
-    else:
-      try:
-        self._timeout = 0.0 + timeout
-      except:
-        raise TypeError('a float is required')
-      if self._timeout < 0.0:
-        raise ValueError('Timeout value out of range')
-
-  def gettimeout(self):
-    """gettimeout() -> timeout
-
-    Returns the timeout in floating seconds associated with socket
-    operations. A timeout of None indicates that timeouts on socket
-    operations are disabled.
-    """
-    if self._timeout < 0.0:
-      return None
-    return self._timeout
-
-  def setsockopt(self, level, option, value):
-    """setsockopt(level, option, value)
-
-    Set a socket option.  See the Unix manual for level and option.
-    The value argument can either be an integer or a string.
-    """
-    # Options set before the underlying socket exists are queued on
-    # self._setsockopt and replayed when _CreateSocket() runs.
-    if not self._created:
-      self._setsockopt.append((level, option, value))
-      self._CreateSocket()
-      return
-    if not self._socket_descriptor:
-      raise error(errno.EBADF, os.strerror(errno.EBADF))
-
-    request = remote_socket_service_pb.SetSocketOptionsRequest()
-    request.set_socket_descriptor(self._socket_descriptor)
-
-    o = request.add_options()
-    o.set_level(level)
-    o.set_option(option)
-    if isinstance(value, (int, long)):
-      o.set_value(struct.pack('=L', value))
-    else:
-      o.set_value(value)
-
-    reply = remote_socket_service_pb.SetSocketOptionsReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall(
-          'remote_socket', 'SetSocketOptions', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      raise _SystemExceptionFromAppError(e)
-
-  def getsockopt(self, level, option, buffersize=0):
-    """getsockopt(level, option[, buffersize]) -> value
-
-    Get a socket option.  See the Unix manual for level and option.
-    If a nonzero buffersize argument is given, the return value is a
-    string of that length; otherwise it is an integer.
-    """
-    if not self._created:
-      self._CreateSocket()
-    if not self._socket_descriptor:
-      raise error(errno.EBADF, os.strerror(errno.EBADF))
-
-    request = remote_socket_service_pb.GetSocketOptionsRequest()
-    request.set_socket_descriptor(self._socket_descriptor)
-    o = request.add_options()
-    o.set_level(level)
-    o.set_option(option)
-    o.set_value('')
-
-    reply = remote_socket_service_pb.GetSocketOptionsReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall(
-          'remote_socket', 'GetSocketOptions', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      raise _SystemExceptionFromAppError(e)
-
-    if not buffersize:
-      return struct.unpack('=L', reply.options(0).value())[0]
-    else:
-      return reply.options(0).value()[:buffersize]
-
-  def shutdown(self, flag):
-    """shutdown(flag)
-
-    Shut down the reading side of the socket (flag == SHUT_RD), the writing side
-    of the socket (flag == SHUT_WR), or both ends (flag == SHUT_RDWR).
-    """
-    if flag not in (SHUT_RD, SHUT_WR, SHUT_RDWR):
-      raise error(errno.EINVAL, os.strerror(errno.EINVAL))
-    if not self._created:
-      self._CreateSocket()
-    if not self._socket_descriptor:
-      raise error(errno.EBADF, os.strerror(errno.EBADF))
-    if (not self._connected or
-        (self._shutdown_read and flag in (SHUT_RD, SHUT_RDWR)) or
-        (self._shutdown_write and flag in (SHUT_WR, SHUT_RDWR))):
-      raise error(errno.ENOTCONN, os.strerror(errno.ENOTCONN))
-
-    request = remote_socket_service_pb.ShutDownRequest()
-    request.set_socket_descriptor(self._socket_descriptor)
-    request.set_how(flag)
-    request.set_send_offset(self._stream_offset)
-
-    reply = remote_socket_service_pb.ShutDownReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall(
-          'remote_socket', 'ShutDown', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      raise _SystemExceptionFromAppError(e)
-
-    if flag == SHUT_RD or flag == SHUT_RDWR:
-      self._shutdown_read = True
-    if flag == SHUT_WR or flag == SHUT_RDWR:
-      self._shutdown_write = True
-
-  def close(self):
-    """close()
-
-    Close the socket.  It cannot be used after this call.
-    """
-    self._created = True
-    if not self._socket_descriptor:
-      return
-
-    request = remote_socket_service_pb.CloseRequest()
-    request.set_socket_descriptor(self._socket_descriptor)
-
-    reply = remote_socket_service_pb.CloseReply()
-
-    try:
-      apiproxy_stub_map.MakeSyncCall('remote_socket', 'Close', request, reply)
-    except apiproxy_errors.ApplicationError, e:
-      raise _SystemExceptionFromAppError(e)
-
-    self._Clear()
-
-  def _SocketDescriptor(self):
-    if not self._created:
-      self._CreateSocket()
-    if not self._socket_descriptor:
-      raise error(errno.EBADF, os.strerror(errno.EBADF))
-    return self._socket_descriptor
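
The shim above folds the three blocking modes into a single float field:
blocking is stored as -1.0 (reported back as None), non-blocking as 0.0, and
finite timeouts as their positive value. A minimal standalone sketch of that
bookkeeping, with illustrative names that are not part of the SDK API:

    class _TimeoutState(object):
        """Sketch of the setblocking()/settimeout() equivalence documented above."""

        def __init__(self):
            self._timeout = -1.0  # -1.0 encodes "blocking", i.e. a timeout of None

        def setblocking(self, block):
            # setblocking(True) == settimeout(None); setblocking(False) == settimeout(0.0)
            self.settimeout(None if block else 0.0)

        def settimeout(self, timeout):
            if timeout is None:
                self._timeout = -1.0
                return
            try:
                timeout = 0.0 + timeout  # the same numeric coercion the shim uses
            except TypeError:
                raise TypeError('a float is required')
            if timeout < 0.0:
                raise ValueError('Timeout value out of range')
            self._timeout = timeout

        def gettimeout(self):
            return None if self._timeout < 0.0 else self._timeout

    state = _TimeoutState()
    state.setblocking(False)
    assert state.gettimeout() == 0.0
    state.settimeout(None)
    assert state.gettimeout() is None
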
diff --git a/google/appengine/api/search/ExpressionLexer.py b/google/appengine/api/search/ExpressionLexer.py
index 66a8efb..ec658b8 100644
--- a/google/appengine/api/search/ExpressionLexer.py
+++ b/google/appengine/api/search/ExpressionLexer.py
@@ -1331,7 +1331,7 @@
 
 
             pass
-            self.match("==")
+            self.match(61)
 
 
 
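
In the ANTLR3 Python runtime, Lexer.match() accepts either a string (matched
character by character) or a single integer code point. Swapping
self.match("==") for self.match(61) therefore makes the lexer consume one '='
(code point 61) where it previously required two, so this grammar's equality
operator is now spelled "=" rather than "==". A one-line sanity check,
illustrative only:

    assert ord('=') == 61  # match(61) consumes exactly one '=' character
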
diff --git a/google/appengine/api/search/ExpressionParser.py b/google/appengine/api/search/ExpressionParser.py
index b9bfe49..3aafc32 100644
--- a/google/appengine/api/search/ExpressionParser.py
+++ b/google/appengine/api/search/ExpressionParser.py
@@ -196,12 +196,12 @@
                 pass
                 root_0 = self._adaptor.nil()
 
-                self._state.following.append(self.FOLLOW_conjunction_in_expression92)
+                self._state.following.append(self.FOLLOW_conjunction_in_expression90)
                 conjunction1 = self.conjunction()
 
                 self._state.following.pop()
                 self._adaptor.addChild(root_0, conjunction1.tree)
-                EOF2=self.match(self.input, EOF, self.FOLLOW_EOF_in_expression94)
+                EOF2=self.match(self.input, EOF, self.FOLLOW_EOF_in_expression92)
 
                 EOF2_tree = self._adaptor.createWithPayload(EOF2)
                 self._adaptor.addChild(root_0, EOF2_tree)
@@ -261,7 +261,7 @@
                 pass
                 root_0 = self._adaptor.nil()
 
-                self._state.following.append(self.FOLLOW_conjunction_in_condExpr107)
+                self._state.following.append(self.FOLLOW_conjunction_in_condExpr105)
                 conjunction3 = self.conjunction()
 
                 self._state.following.pop()
@@ -275,12 +275,12 @@
                 if alt1 == 1:
 
                     pass
-                    COND4=self.match(self.input, COND, self.FOLLOW_COND_in_condExpr110)
+                    COND4=self.match(self.input, COND, self.FOLLOW_COND_in_condExpr108)
 
                     COND4_tree = self._adaptor.createWithPayload(COND4)
                     root_0 = self._adaptor.becomeRoot(COND4_tree, root_0)
 
-                    self._state.following.append(self.FOLLOW_addExpr_in_condExpr113)
+                    self._state.following.append(self.FOLLOW_addExpr_in_condExpr111)
                     addExpr5 = self.addExpr()
 
                     self._state.following.pop()
@@ -343,7 +343,7 @@
                 pass
                 root_0 = self._adaptor.nil()
 
-                self._state.following.append(self.FOLLOW_disjunction_in_conjunction128)
+                self._state.following.append(self.FOLLOW_disjunction_in_conjunction126)
                 disjunction6 = self.disjunction()
 
                 self._state.following.pop()
@@ -360,12 +360,12 @@
                     if alt2 == 1:
 
                         pass
-                        AND7=self.match(self.input, AND, self.FOLLOW_AND_in_conjunction131)
+                        AND7=self.match(self.input, AND, self.FOLLOW_AND_in_conjunction129)
 
                         AND7_tree = self._adaptor.createWithPayload(AND7)
                         root_0 = self._adaptor.becomeRoot(AND7_tree, root_0)
 
-                        self._state.following.append(self.FOLLOW_disjunction_in_conjunction134)
+                        self._state.following.append(self.FOLLOW_disjunction_in_conjunction132)
                         disjunction8 = self.disjunction()
 
                         self._state.following.pop()
@@ -431,7 +431,7 @@
                 pass
                 root_0 = self._adaptor.nil()
 
-                self._state.following.append(self.FOLLOW_negation_in_disjunction149)
+                self._state.following.append(self.FOLLOW_negation_in_disjunction147)
                 negation9 = self.negation()
 
                 self._state.following.pop()
@@ -460,7 +460,7 @@
                             raise mse
 
 
-                        self._state.following.append(self.FOLLOW_negation_in_disjunction161)
+                        self._state.following.append(self.FOLLOW_negation_in_disjunction159)
                         negation11 = self.negation()
 
                         self._state.following.pop()
@@ -539,7 +539,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_cmpExpr_in_negation176)
+                    self._state.following.append(self.FOLLOW_cmpExpr_in_negation174)
                     cmpExpr12 = self.cmpExpr()
 
                     self._state.following.pop()
@@ -551,12 +551,12 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    NOT13=self.match(self.input, NOT, self.FOLLOW_NOT_in_negation182)
+                    NOT13=self.match(self.input, NOT, self.FOLLOW_NOT_in_negation180)
 
                     NOT13_tree = self._adaptor.createWithPayload(NOT13)
                     root_0 = self._adaptor.becomeRoot(NOT13_tree, root_0)
 
-                    self._state.following.append(self.FOLLOW_cmpExpr_in_negation185)
+                    self._state.following.append(self.FOLLOW_cmpExpr_in_negation183)
                     cmpExpr14 = self.cmpExpr()
 
                     self._state.following.pop()
@@ -615,7 +615,7 @@
                 pass
                 root_0 = self._adaptor.nil()
 
-                self._state.following.append(self.FOLLOW_addExpr_in_cmpExpr198)
+                self._state.following.append(self.FOLLOW_addExpr_in_cmpExpr196)
                 addExpr15 = self.addExpr()
 
                 self._state.following.pop()
@@ -629,12 +629,12 @@
                 if alt5 == 1:
 
                     pass
-                    self._state.following.append(self.FOLLOW_cmpOp_in_cmpExpr201)
+                    self._state.following.append(self.FOLLOW_cmpOp_in_cmpExpr199)
                     cmpOp16 = self.cmpOp()
 
                     self._state.following.pop()
                     root_0 = self._adaptor.becomeRoot(cmpOp16.tree, root_0)
-                    self._state.following.append(self.FOLLOW_addExpr_in_cmpExpr204)
+                    self._state.following.append(self.FOLLOW_addExpr_in_cmpExpr202)
                     addExpr17 = self.addExpr()
 
                     self._state.following.pop()
@@ -759,7 +759,7 @@
                 pass
                 root_0 = self._adaptor.nil()
 
-                self._state.following.append(self.FOLLOW_multExpr_in_addExpr262)
+                self._state.following.append(self.FOLLOW_multExpr_in_addExpr260)
                 multExpr19 = self.multExpr()
 
                 self._state.following.pop()
@@ -776,12 +776,12 @@
                     if alt6 == 1:
 
                         pass
-                        self._state.following.append(self.FOLLOW_addOp_in_addExpr265)
+                        self._state.following.append(self.FOLLOW_addOp_in_addExpr263)
                         addOp20 = self.addOp()
 
                         self._state.following.pop()
                         root_0 = self._adaptor.becomeRoot(addOp20.tree, root_0)
-                        self._state.following.append(self.FOLLOW_multExpr_in_addExpr268)
+                        self._state.following.append(self.FOLLOW_multExpr_in_addExpr266)
                         multExpr21 = self.multExpr()
 
                         self._state.following.pop()
@@ -909,7 +909,7 @@
                 pass
                 root_0 = self._adaptor.nil()
 
-                self._state.following.append(self.FOLLOW_unary_in_multExpr302)
+                self._state.following.append(self.FOLLOW_unary_in_multExpr300)
                 unary23 = self.unary()
 
                 self._state.following.pop()
@@ -926,12 +926,12 @@
                     if alt7 == 1:
 
                         pass
-                        self._state.following.append(self.FOLLOW_multOp_in_multExpr305)
+                        self._state.following.append(self.FOLLOW_multOp_in_multExpr303)
                         multOp24 = self.multOp()
 
                         self._state.following.pop()
                         root_0 = self._adaptor.becomeRoot(multOp24.tree, root_0)
-                        self._state.following.append(self.FOLLOW_unary_in_multExpr308)
+                        self._state.following.append(self.FOLLOW_unary_in_multExpr306)
                         unary25 = self.unary()
 
                         self._state.following.pop()
@@ -1071,9 +1071,9 @@
                 if alt8 == 1:
 
                     pass
-                    MINUS27=self.match(self.input, MINUS, self.FOLLOW_MINUS_in_unary342)
+                    MINUS27=self.match(self.input, MINUS, self.FOLLOW_MINUS_in_unary340)
                     stream_MINUS.add(MINUS27)
-                    self._state.following.append(self.FOLLOW_atom_in_unary344)
+                    self._state.following.append(self.FOLLOW_atom_in_unary342)
                     atom28 = self.atom()
 
                     self._state.following.pop()
@@ -1114,7 +1114,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_atom_in_unary359)
+                    self._state.following.append(self.FOLLOW_atom_in_unary357)
                     atom29 = self.atom()
 
                     self._state.following.pop()
@@ -1186,7 +1186,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_var_in_atom372)
+                    self._state.following.append(self.FOLLOW_var_in_atom370)
                     var30 = self.var()
 
                     self._state.following.pop()
@@ -1198,7 +1198,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_num_in_atom378)
+                    self._state.following.append(self.FOLLOW_num_in_atom376)
                     num31 = self.num()
 
                     self._state.following.pop()
@@ -1210,7 +1210,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_str_in_atom384)
+                    self._state.following.append(self.FOLLOW_str_in_atom382)
                     str32 = self.str()
 
                     self._state.following.pop()
@@ -1222,7 +1222,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_fn_in_atom390)
+                    self._state.following.append(self.FOLLOW_fn_in_atom388)
                     fn33 = self.fn()
 
                     self._state.following.pop()
@@ -1232,14 +1232,14 @@
                 elif alt9 == 5:
 
                     pass
-                    LPAREN34=self.match(self.input, LPAREN, self.FOLLOW_LPAREN_in_atom396)
+                    LPAREN34=self.match(self.input, LPAREN, self.FOLLOW_LPAREN_in_atom394)
                     stream_LPAREN.add(LPAREN34)
-                    self._state.following.append(self.FOLLOW_conjunction_in_atom398)
+                    self._state.following.append(self.FOLLOW_conjunction_in_atom396)
                     conjunction35 = self.conjunction()
 
                     self._state.following.pop()
                     stream_conjunction.add(conjunction35.tree)
-                    RPAREN36=self.match(self.input, RPAREN, self.FOLLOW_RPAREN_in_atom400)
+                    RPAREN36=self.match(self.input, RPAREN, self.FOLLOW_RPAREN_in_atom398)
                     stream_RPAREN.add(RPAREN36)
 
 
@@ -1322,7 +1322,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_name_in_var417)
+                    self._state.following.append(self.FOLLOW_name_in_var415)
                     name37 = self.name()
 
                     self._state.following.pop()
@@ -1332,12 +1332,12 @@
                 elif alt10 == 2:
 
                     pass
-                    self._state.following.append(self.FOLLOW_name_in_var423)
+                    self._state.following.append(self.FOLLOW_name_in_var421)
                     name38 = self.name()
 
                     self._state.following.pop()
                     stream_name.add(name38.tree)
-                    self._state.following.append(self.FOLLOW_index_in_var425)
+                    self._state.following.append(self.FOLLOW_index_in_var423)
                     index39 = self.index()
 
                     self._state.following.pop()
@@ -1426,11 +1426,11 @@
 
 
                 pass
-                LSQUARE40=self.match(self.input, LSQUARE, self.FOLLOW_LSQUARE_in_index447)
+                LSQUARE40=self.match(self.input, LSQUARE, self.FOLLOW_LSQUARE_in_index445)
                 stream_LSQUARE.add(LSQUARE40)
-                x=self.match(self.input, INT, self.FOLLOW_INT_in_index451)
+                x=self.match(self.input, INT, self.FOLLOW_INT_in_index449)
                 stream_INT.add(x)
-                RSQUARE41=self.match(self.input, RSQUARE, self.FOLLOW_RSQUARE_in_index453)
+                RSQUARE41=self.match(self.input, RSQUARE, self.FOLLOW_RSQUARE_in_index451)
                 stream_RSQUARE.add(RSQUARE41)
 
 
@@ -1541,7 +1541,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    NAME42=self.match(self.input, NAME, self.FOLLOW_NAME_in_name471)
+                    NAME42=self.match(self.input, NAME, self.FOLLOW_NAME_in_name469)
 
                     NAME42_tree = self._adaptor.createWithPayload(NAME42)
                     self._adaptor.addChild(root_0, NAME42_tree)
@@ -1558,12 +1558,12 @@
                         if alt11 == 1:
 
                             pass
-                            char_literal43=self.match(self.input, 58, self.FOLLOW_58_in_name474)
+                            char_literal43=self.match(self.input, 58, self.FOLLOW_58_in_name472)
 
                             char_literal43_tree = self._adaptor.createWithPayload(char_literal43)
                             root_0 = self._adaptor.becomeRoot(char_literal43_tree, root_0)
 
-                            NAME44=self.match(self.input, NAME, self.FOLLOW_NAME_in_name477)
+                            NAME44=self.match(self.input, NAME, self.FOLLOW_NAME_in_name475)
 
                             NAME44_tree = self._adaptor.createWithPayload(NAME44)
                             self._adaptor.addChild(root_0, NAME44_tree)
@@ -1579,7 +1579,7 @@
                 elif alt12 == 2:
 
                     pass
-                    t=self.match(self.input, TEXT, self.FOLLOW_TEXT_in_name493)
+                    t=self.match(self.input, TEXT, self.FOLLOW_TEXT_in_name491)
                     stream_TEXT.add(t)
 
 
@@ -1609,7 +1609,7 @@
                 elif alt12 == 3:
 
                     pass
-                    t=self.match(self.input, HTML, self.FOLLOW_HTML_in_name506)
+                    t=self.match(self.input, HTML, self.FOLLOW_HTML_in_name504)
                     stream_HTML.add(t)
 
 
@@ -1639,7 +1639,7 @@
                 elif alt12 == 4:
 
                     pass
-                    t=self.match(self.input, ATOM, self.FOLLOW_ATOM_in_name519)
+                    t=self.match(self.input, ATOM, self.FOLLOW_ATOM_in_name517)
                     stream_ATOM.add(t)
 
 
@@ -1669,7 +1669,7 @@
                 elif alt12 == 5:
 
                     pass
-                    t=self.match(self.input, DATE, self.FOLLOW_DATE_in_name532)
+                    t=self.match(self.input, DATE, self.FOLLOW_DATE_in_name530)
                     stream_DATE.add(t)
 
 
@@ -1699,7 +1699,7 @@
                 elif alt12 == 6:
 
                     pass
-                    t=self.match(self.input, NUMBER, self.FOLLOW_NUMBER_in_name545)
+                    t=self.match(self.input, NUMBER, self.FOLLOW_NUMBER_in_name543)
                     stream_NUMBER.add(t)
 
 
@@ -1729,7 +1729,7 @@
                 elif alt12 == 7:
 
                     pass
-                    t=self.match(self.input, GEO, self.FOLLOW_GEO_in_name558)
+                    t=self.match(self.input, GEO, self.FOLLOW_GEO_in_name556)
                     stream_GEO.add(t)
 
 
@@ -1866,7 +1866,7 @@
                 pass
                 root_0 = self._adaptor.nil()
 
-                PHRASE46=self.match(self.input, PHRASE, self.FOLLOW_PHRASE_in_str595)
+                PHRASE46=self.match(self.input, PHRASE, self.FOLLOW_PHRASE_in_str593)
 
                 PHRASE46_tree = self._adaptor.createWithPayload(PHRASE46)
                 self._adaptor.addChild(root_0, PHRASE46_tree)
@@ -1934,14 +1934,14 @@
 
 
                 pass
-                self._state.following.append(self.FOLLOW_fnName_in_fn608)
+                self._state.following.append(self.FOLLOW_fnName_in_fn606)
                 fnName47 = self.fnName()
 
                 self._state.following.pop()
                 stream_fnName.add(fnName47.tree)
-                LPAREN48=self.match(self.input, LPAREN, self.FOLLOW_LPAREN_in_fn610)
+                LPAREN48=self.match(self.input, LPAREN, self.FOLLOW_LPAREN_in_fn608)
                 stream_LPAREN.add(LPAREN48)
-                self._state.following.append(self.FOLLOW_condExpr_in_fn612)
+                self._state.following.append(self.FOLLOW_condExpr_in_fn610)
                 condExpr49 = self.condExpr()
 
                 self._state.following.pop()
@@ -1958,9 +1958,9 @@
                     if alt13 == 1:
 
                         pass
-                        COMMA50=self.match(self.input, COMMA, self.FOLLOW_COMMA_in_fn615)
+                        COMMA50=self.match(self.input, COMMA, self.FOLLOW_COMMA_in_fn613)
                         stream_COMMA.add(COMMA50)
-                        self._state.following.append(self.FOLLOW_condExpr_in_fn617)
+                        self._state.following.append(self.FOLLOW_condExpr_in_fn615)
                         condExpr51 = self.condExpr()
 
                         self._state.following.pop()
@@ -1971,7 +1971,7 @@
                         break
 
 
-                RPAREN52=self.match(self.input, RPAREN, self.FOLLOW_RPAREN_in_fn621)
+                RPAREN52=self.match(self.input, RPAREN, self.FOLLOW_RPAREN_in_fn619)
                 stream_RPAREN.add(RPAREN52)
 
 
@@ -2193,65 +2193,65 @@
     DFA10 = DFA
 
 
-    FOLLOW_conjunction_in_expression92 = frozenset([])
-    FOLLOW_EOF_in_expression94 = frozenset([1])
-    FOLLOW_conjunction_in_condExpr107 = frozenset([1, 6])
-    FOLLOW_COND_in_condExpr110 = frozenset([18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
-    FOLLOW_addExpr_in_condExpr113 = frozenset([1])
-    FOLLOW_disjunction_in_conjunction128 = frozenset([1, 7])
-    FOLLOW_AND_in_conjunction131 = frozenset([10, 18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
-    FOLLOW_disjunction_in_conjunction134 = frozenset([1, 7])
-    FOLLOW_negation_in_disjunction149 = frozenset([1, 8, 9])
-    FOLLOW_set_in_disjunction152 = frozenset([10, 18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
-    FOLLOW_negation_in_disjunction161 = frozenset([1, 8, 9])
-    FOLLOW_cmpExpr_in_negation176 = frozenset([1])
-    FOLLOW_NOT_in_negation182 = frozenset([18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
-    FOLLOW_cmpExpr_in_negation185 = frozenset([1])
-    FOLLOW_addExpr_in_cmpExpr198 = frozenset([1, 11, 12, 13, 14, 15, 16])
-    FOLLOW_cmpOp_in_cmpExpr201 = frozenset([18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
-    FOLLOW_addExpr_in_cmpExpr204 = frozenset([1])
+    FOLLOW_conjunction_in_expression90 = frozenset([])
+    FOLLOW_EOF_in_expression92 = frozenset([1])
+    FOLLOW_conjunction_in_condExpr105 = frozenset([1, 6])
+    FOLLOW_COND_in_condExpr108 = frozenset([18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
+    FOLLOW_addExpr_in_condExpr111 = frozenset([1])
+    FOLLOW_disjunction_in_conjunction126 = frozenset([1, 7])
+    FOLLOW_AND_in_conjunction129 = frozenset([10, 18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
+    FOLLOW_disjunction_in_conjunction132 = frozenset([1, 7])
+    FOLLOW_negation_in_disjunction147 = frozenset([1, 8, 9])
+    FOLLOW_set_in_disjunction150 = frozenset([10, 18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
+    FOLLOW_negation_in_disjunction159 = frozenset([1, 8, 9])
+    FOLLOW_cmpExpr_in_negation174 = frozenset([1])
+    FOLLOW_NOT_in_negation180 = frozenset([18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
+    FOLLOW_cmpExpr_in_negation183 = frozenset([1])
+    FOLLOW_addExpr_in_cmpExpr196 = frozenset([1, 11, 12, 13, 14, 15, 16])
+    FOLLOW_cmpOp_in_cmpExpr199 = frozenset([18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
+    FOLLOW_addExpr_in_cmpExpr202 = frozenset([1])
     FOLLOW_set_in_cmpOp0 = frozenset([1])
-    FOLLOW_multExpr_in_addExpr262 = frozenset([1, 17, 18])
-    FOLLOW_addOp_in_addExpr265 = frozenset([18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
-    FOLLOW_multExpr_in_addExpr268 = frozenset([1, 17, 18])
+    FOLLOW_multExpr_in_addExpr260 = frozenset([1, 17, 18])
+    FOLLOW_addOp_in_addExpr263 = frozenset([18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
+    FOLLOW_multExpr_in_addExpr266 = frozenset([1, 17, 18])
     FOLLOW_set_in_addOp0 = frozenset([1])
-    FOLLOW_unary_in_multExpr302 = frozenset([1, 19, 20])
-    FOLLOW_multOp_in_multExpr305 = frozenset([18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
-    FOLLOW_unary_in_multExpr308 = frozenset([1, 19, 20])
+    FOLLOW_unary_in_multExpr300 = frozenset([1, 19, 20])
+    FOLLOW_multOp_in_multExpr303 = frozenset([18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
+    FOLLOW_unary_in_multExpr306 = frozenset([1, 19, 20])
     FOLLOW_set_in_multOp0 = frozenset([1])
-    FOLLOW_MINUS_in_unary342 = frozenset([18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
-    FOLLOW_atom_in_unary344 = frozenset([1])
-    FOLLOW_atom_in_unary359 = frozenset([1])
-    FOLLOW_var_in_atom372 = frozenset([1])
-    FOLLOW_num_in_atom378 = frozenset([1])
-    FOLLOW_str_in_atom384 = frozenset([1])
-    FOLLOW_fn_in_atom390 = frozenset([1])
-    FOLLOW_LPAREN_in_atom396 = frozenset([10, 18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
-    FOLLOW_conjunction_in_atom398 = frozenset([22])
-    FOLLOW_RPAREN_in_atom400 = frozenset([1])
-    FOLLOW_name_in_var417 = frozenset([1])
-    FOLLOW_name_in_var423 = frozenset([23])
-    FOLLOW_index_in_var425 = frozenset([1])
-    FOLLOW_LSQUARE_in_index447 = frozenset([24])
-    FOLLOW_INT_in_index451 = frozenset([25])
-    FOLLOW_RSQUARE_in_index453 = frozenset([1])
-    FOLLOW_NAME_in_name471 = frozenset([1, 58])
-    FOLLOW_58_in_name474 = frozenset([26])
-    FOLLOW_NAME_in_name477 = frozenset([1, 58])
-    FOLLOW_TEXT_in_name493 = frozenset([1])
-    FOLLOW_HTML_in_name506 = frozenset([1])
-    FOLLOW_ATOM_in_name519 = frozenset([1])
-    FOLLOW_DATE_in_name532 = frozenset([1])
-    FOLLOW_NUMBER_in_name545 = frozenset([1])
-    FOLLOW_GEO_in_name558 = frozenset([1])
+    FOLLOW_MINUS_in_unary340 = frozenset([18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
+    FOLLOW_atom_in_unary342 = frozenset([1])
+    FOLLOW_atom_in_unary357 = frozenset([1])
+    FOLLOW_var_in_atom370 = frozenset([1])
+    FOLLOW_num_in_atom376 = frozenset([1])
+    FOLLOW_str_in_atom382 = frozenset([1])
+    FOLLOW_fn_in_atom388 = frozenset([1])
+    FOLLOW_LPAREN_in_atom394 = frozenset([10, 18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
+    FOLLOW_conjunction_in_atom396 = frozenset([22])
+    FOLLOW_RPAREN_in_atom398 = frozenset([1])
+    FOLLOW_name_in_var415 = frozenset([1])
+    FOLLOW_name_in_var421 = frozenset([23])
+    FOLLOW_index_in_var423 = frozenset([1])
+    FOLLOW_LSQUARE_in_index445 = frozenset([24])
+    FOLLOW_INT_in_index449 = frozenset([25])
+    FOLLOW_RSQUARE_in_index451 = frozenset([1])
+    FOLLOW_NAME_in_name469 = frozenset([1, 58])
+    FOLLOW_58_in_name472 = frozenset([26])
+    FOLLOW_NAME_in_name475 = frozenset([1, 58])
+    FOLLOW_TEXT_in_name491 = frozenset([1])
+    FOLLOW_HTML_in_name504 = frozenset([1])
+    FOLLOW_ATOM_in_name517 = frozenset([1])
+    FOLLOW_DATE_in_name530 = frozenset([1])
+    FOLLOW_NUMBER_in_name543 = frozenset([1])
+    FOLLOW_GEO_in_name556 = frozenset([1])
     FOLLOW_set_in_num0 = frozenset([1])
-    FOLLOW_PHRASE_in_str595 = frozenset([1])
-    FOLLOW_fnName_in_fn608 = frozenset([21])
-    FOLLOW_LPAREN_in_fn610 = frozenset([10, 18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
-    FOLLOW_condExpr_in_fn612 = frozenset([22, 35])
-    FOLLOW_COMMA_in_fn615 = frozenset([10, 18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
-    FOLLOW_condExpr_in_fn617 = frozenset([22, 35])
-    FOLLOW_RPAREN_in_fn621 = frozenset([1])
+    FOLLOW_PHRASE_in_str593 = frozenset([1])
+    FOLLOW_fnName_in_fn606 = frozenset([21])
+    FOLLOW_LPAREN_in_fn608 = frozenset([10, 18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
+    FOLLOW_condExpr_in_fn610 = frozenset([22, 35])
+    FOLLOW_COMMA_in_fn613 = frozenset([10, 18, 21, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45])
+    FOLLOW_condExpr_in_fn615 = frozenset([22, 35])
+    FOLLOW_RPAREN_in_fn619 = frozenset([1])
     FOLLOW_set_in_fnName0 = frozenset([1])
 
 
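
Every FOLLOW_* constant renamed in this file shifts its numeric suffix down by
exactly 2 (expression92 to expression90, condExpr107 to condExpr105, and so
on). The suffix encodes the rule element's character offset in the source
grammar, so a uniform -2 shift is what a two-character deletion near the top
of ExpressionParser.g would produce; the frozensets themselves, the FOLLOW
sets the parser consults for single-token error recovery, are unchanged. A
quick check of the pattern, using offsets taken from the hunks above:

    renames = [(92, 90), (107, 105), (128, 126), (342, 340), (595, 593), (621, 619)]
    assert all(old - new == 2 for old, new in renames)
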
diff --git a/google/appengine/api/search/QueryLexer.py b/google/appengine/api/search/QueryLexer.py
index 105df4c..9fccaa3 100644
--- a/google/appengine/api/search/QueryLexer.py
+++ b/google/appengine/api/search/QueryLexer.py
@@ -26,7 +26,6 @@
 
 
 FUNCTION=7
-LT=17
 GEO_POINT_FN=29
 FIX=30
 ESC=34
@@ -48,6 +47,7 @@
 EQ=21
 NE=20
 T__43=43
+LESSTHAN=17
 GE=18
 T__44=44
 T__45=45
@@ -530,10 +530,10 @@
 
 
 
-    def mLT(self, ):
+    def mLESSTHAN(self, ):
 
         try:
-            _type = LT
+            _type = LESSTHAN
             _channel = DEFAULT_CHANNEL
 
 
@@ -1319,7 +1319,7 @@
         elif alt9 == 16:
 
             pass
-            self.mLT()
+            self.mLESSTHAN()
 
 
         elif alt9 == 17:
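
The less-than token keeps its value (17) but is renamed from LT to LESSTHAN in
both the lexer and, below, the parser. The diff does not state a motivation;
one plausible reading is that the old name clashed with the ANTLR3 token
stream's lookahead method, which is also called LT (the generated parser calls
self.input.LT(1) throughout). A contrived sketch of that name clash, using
stand-in classes rather than the real runtime:

    LT = 17  # old module-level token-type constant

    class FakeTokenStream(object):
        def LT(self, k):
            # In the ANTLR3 runtime, input.LT(k) returns the k-th lookahead token.
            return 'token %d ahead' % k

    stream = FakeTokenStream()
    print(stream.LT(1))  # the method, not the constant; distinct names avoid confusion
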
diff --git a/google/appengine/api/search/QueryParser.py b/google/appengine/api/search/QueryParser.py
index 6428d5e..83418a8 100644
--- a/google/appengine/api/search/QueryParser.py
+++ b/google/appengine/api/search/QueryParser.py
@@ -27,15 +27,10 @@
 
 
 
-
-
-
-
 HIDDEN = BaseRecognizer.HIDDEN
 
 
 FUNCTION=7
-LT=17
 GEO_POINT_FN=29
 FIX=30
 ESC=34
@@ -57,6 +52,7 @@
 EQ=21
 NE=20
 T__43=43
+LESSTHAN=17
 GE=18
 T__44=44
 T__45=45
@@ -82,7 +78,7 @@
 tokenNames = [
     "<invalid>", "<EOR>", "<DOWN>", "<UP>",
     "ARGS", "CONJUNCTION", "DISJUNCTION", "FUNCTION", "FUZZY", "GLOBAL",
-    "LITERAL", "NEGATION", "STRING", "SEQUENCE", "VALUE", "WS", "LE", "LT",
+    "LITERAL", "NEGATION", "STRING", "SEQUENCE", "VALUE", "WS", "LE", "LESSTHAN",
     "GE", "GT", "NE", "EQ", "HAS", "LPAREN", "RPAREN", "AND", "OR", "NOT",
     "DISTANCE_FN", "GEO_POINT_FN", "FIX", "REWRITE", "TEXT", "QUOTE", "ESC",
     "UNICODE_ESC", "OCTAL_ESC", "CHAR_SEQ", "DIGIT", "MID_CHAR", "ESCAPED_CHAR",
@@ -215,7 +211,7 @@
                     if alt1 == 1:
 
                         pass
-                        WS1=self.match(self.input, WS, self.FOLLOW_WS_in_query116)
+                        WS1=self.match(self.input, WS, self.FOLLOW_WS_in_query112)
                         stream_WS.add(WS1)
 
 
@@ -223,7 +219,7 @@
                         break
 
 
-                self._state.following.append(self.FOLLOW_expression_in_query119)
+                self._state.following.append(self.FOLLOW_expression_in_query115)
                 expression2 = self.expression()
 
                 self._state.following.pop()
@@ -240,7 +236,7 @@
                     if alt2 == 1:
 
                         pass
-                        WS3=self.match(self.input, WS, self.FOLLOW_WS_in_query121)
+                        WS3=self.match(self.input, WS, self.FOLLOW_WS_in_query117)
                         stream_WS.add(WS3)
 
 
@@ -248,7 +244,7 @@
                         break
 
 
-                EOF4=self.match(self.input, EOF, self.FOLLOW_EOF_in_query124)
+                EOF4=self.match(self.input, EOF, self.FOLLOW_EOF_in_query120)
                 stream_EOF.add(EOF4)
 
 
@@ -327,7 +323,7 @@
 
 
                 pass
-                self._state.following.append(self.FOLLOW_sequence_in_expression143)
+                self._state.following.append(self.FOLLOW_sequence_in_expression139)
                 sequence5 = self.sequence()
 
                 self._state.following.pop()
@@ -339,12 +335,12 @@
                     if alt3 == 1:
 
                         pass
-                        self._state.following.append(self.FOLLOW_andOp_in_expression146)
+                        self._state.following.append(self.FOLLOW_andOp_in_expression142)
                         andOp6 = self.andOp()
 
                         self._state.following.pop()
                         stream_andOp.add(andOp6.tree)
-                        self._state.following.append(self.FOLLOW_sequence_in_expression148)
+                        self._state.following.append(self.FOLLOW_sequence_in_expression144)
                         sequence7 = self.sequence()
 
                         self._state.following.pop()
@@ -446,7 +442,7 @@
 
 
                 pass
-                self._state.following.append(self.FOLLOW_factor_in_sequence174)
+                self._state.following.append(self.FOLLOW_factor_in_sequence170)
                 factor8 = self.factor()
 
                 self._state.following.pop()
@@ -471,7 +467,7 @@
                             if alt4 == 1:
 
                                 pass
-                                WS9=self.match(self.input, WS, self.FOLLOW_WS_in_sequence177)
+                                WS9=self.match(self.input, WS, self.FOLLOW_WS_in_sequence173)
                                 stream_WS.add(WS9)
 
 
@@ -485,7 +481,7 @@
                             cnt4 += 1
 
 
-                        self._state.following.append(self.FOLLOW_factor_in_sequence180)
+                        self._state.following.append(self.FOLLOW_factor_in_sequence176)
                         factor10 = self.factor()
 
                         self._state.following.pop()
@@ -587,7 +583,7 @@
 
 
                 pass
-                self._state.following.append(self.FOLLOW_term_in_factor206)
+                self._state.following.append(self.FOLLOW_term_in_factor202)
                 term11 = self.term()
 
                 self._state.following.pop()
@@ -599,12 +595,12 @@
                     if alt6 == 1:
 
                         pass
-                        self._state.following.append(self.FOLLOW_orOp_in_factor209)
+                        self._state.following.append(self.FOLLOW_orOp_in_factor205)
                         orOp12 = self.orOp()
 
                         self._state.following.pop()
                         stream_orOp.add(orOp12.tree)
-                        self._state.following.append(self.FOLLOW_term_in_factor211)
+                        self._state.following.append(self.FOLLOW_term_in_factor207)
                         term13 = self.term()
 
                         self._state.following.pop()
@@ -719,12 +715,12 @@
                 if alt7 == 1:
 
                     pass
-                    self._state.following.append(self.FOLLOW_notOp_in_term235)
+                    self._state.following.append(self.FOLLOW_notOp_in_term231)
                     notOp14 = self.notOp()
 
                     self._state.following.pop()
                     stream_notOp.add(notOp14.tree)
-                    self._state.following.append(self.FOLLOW_primitive_in_term237)
+                    self._state.following.append(self.FOLLOW_primitive_in_term233)
                     primitive15 = self.primitive()
 
                     self._state.following.pop()
@@ -765,7 +761,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_primitive_in_term251)
+                    self._state.following.append(self.FOLLOW_primitive_in_term247)
                     primitive16 = self.primitive()
 
                     self._state.following.pop()
@@ -809,7 +805,7 @@
 
         root_0 = None
 
-        restrict17 = None
+        restriction17 = None
 
         composite18 = None
 
@@ -827,11 +823,11 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_restrict_in_primitive267)
-                    restrict17 = self.restrict()
+                    self._state.following.append(self.FOLLOW_restriction_in_primitive263)
+                    restriction17 = self.restriction()
 
                     self._state.following.pop()
-                    self._adaptor.addChild(root_0, restrict17.tree)
+                    self._adaptor.addChild(root_0, restriction17.tree)
 
 
                 elif alt8 == 2:
@@ -839,7 +835,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_composite_in_primitive273)
+                    self._state.following.append(self.FOLLOW_composite_in_primitive269)
                     composite18 = self.composite()
 
                     self._state.following.pop()
@@ -849,7 +845,7 @@
                 elif alt8 == 3:
 
                     pass
-                    self._state.following.append(self.FOLLOW_item_in_primitive279)
+                    self._state.following.append(self.FOLLOW_item_in_primitive275)
                     item19 = self.item()
 
                     self._state.following.pop()
@@ -905,7 +901,7 @@
 
 
 
-    class restrict_return(ParserRuleReturnScope):
+    class restriction_return(ParserRuleReturnScope):
         def __init__(self):
             ParserRuleReturnScope.__init__(self)
 
@@ -916,9 +912,9 @@
 
 
 
-    def restrict(self, ):
+    def restriction(self, ):
 
-        retval = self.restrict_return()
+        retval = self.restriction_return()
         retval.start = self.input.LT(1)
 
         root_0 = None
@@ -938,17 +934,17 @@
 
 
                 pass
-                self._state.following.append(self.FOLLOW_comparable_in_restrict305)
+                self._state.following.append(self.FOLLOW_comparable_in_restriction301)
                 comparable20 = self.comparable()
 
                 self._state.following.pop()
                 stream_comparable.add(comparable20.tree)
-                self._state.following.append(self.FOLLOW_comparator_in_restrict307)
+                self._state.following.append(self.FOLLOW_comparator_in_restriction303)
                 comparator21 = self.comparator()
 
                 self._state.following.pop()
                 stream_comparator.add(comparator21.tree)
-                self._state.following.append(self.FOLLOW_arg_in_restrict309)
+                self._state.following.append(self.FOLLOW_arg_in_restriction305)
                 arg22 = self.arg()
 
                 self._state.following.pop()
@@ -1031,9 +1027,9 @@
         WS23_tree = None
         WS24_tree = None
         stream_HAS = RewriteRuleTokenStream(self._adaptor, "token HAS")
+        stream_LESSTHAN = RewriteRuleTokenStream(self._adaptor, "token LESSTHAN")
         stream_GE = RewriteRuleTokenStream(self._adaptor, "token GE")
         stream_GT = RewriteRuleTokenStream(self._adaptor, "token GT")
-        stream_LT = RewriteRuleTokenStream(self._adaptor, "token LT")
         stream_WS = RewriteRuleTokenStream(self._adaptor, "token WS")
         stream_EQ = RewriteRuleTokenStream(self._adaptor, "token EQ")
         stream_LE = RewriteRuleTokenStream(self._adaptor, "token LE")
@@ -1056,7 +1052,7 @@
                     if alt9 == 1:
 
                         pass
-                        WS23=self.match(self.input, WS, self.FOLLOW_WS_in_comparator333)
+                        WS23=self.match(self.input, WS, self.FOLLOW_WS_in_comparator329)
                         stream_WS.add(WS23)
 
 
@@ -1069,7 +1065,7 @@
                 LA10 = self.input.LA(1)
                 if LA10 == LE:
                     alt10 = 1
-                elif LA10 == LT:
+                elif LA10 == LESSTHAN:
                     alt10 = 2
                 elif LA10 == GE:
                     alt10 = 3
@@ -1089,49 +1085,49 @@
                 if alt10 == 1:
 
                     pass
-                    x=self.match(self.input, LE, self.FOLLOW_LE_in_comparator339)
+                    x=self.match(self.input, LE, self.FOLLOW_LE_in_comparator335)
                     stream_LE.add(x)
 
 
                 elif alt10 == 2:
 
                     pass
-                    x=self.match(self.input, LT, self.FOLLOW_LT_in_comparator345)
-                    stream_LT.add(x)
+                    x=self.match(self.input, LESSTHAN, self.FOLLOW_LESSTHAN_in_comparator341)
+                    stream_LESSTHAN.add(x)
 
 
                 elif alt10 == 3:
 
                     pass
-                    x=self.match(self.input, GE, self.FOLLOW_GE_in_comparator351)
+                    x=self.match(self.input, GE, self.FOLLOW_GE_in_comparator347)
                     stream_GE.add(x)
 
 
                 elif alt10 == 4:
 
                     pass
-                    x=self.match(self.input, GT, self.FOLLOW_GT_in_comparator357)
+                    x=self.match(self.input, GT, self.FOLLOW_GT_in_comparator353)
                     stream_GT.add(x)
 
 
                 elif alt10 == 5:
 
                     pass
-                    x=self.match(self.input, NE, self.FOLLOW_NE_in_comparator363)
+                    x=self.match(self.input, NE, self.FOLLOW_NE_in_comparator359)
                     stream_NE.add(x)
 
 
                 elif alt10 == 6:
 
                     pass
-                    x=self.match(self.input, EQ, self.FOLLOW_EQ_in_comparator369)
+                    x=self.match(self.input, EQ, self.FOLLOW_EQ_in_comparator365)
                     stream_EQ.add(x)
 
 
                 elif alt10 == 7:
 
                     pass
-                    x=self.match(self.input, HAS, self.FOLLOW_HAS_in_comparator375)
+                    x=self.match(self.input, HAS, self.FOLLOW_HAS_in_comparator371)
                     stream_HAS.add(x)
 
 
@@ -1148,7 +1144,7 @@
                     if alt11 == 1:
 
                         pass
-                        WS24=self.match(self.input, WS, self.FOLLOW_WS_in_comparator378)
+                        WS24=self.match(self.input, WS, self.FOLLOW_WS_in_comparator374)
                         stream_WS.add(WS24)
 
 
@@ -1230,11 +1226,10 @@
             try:
 
                 alt12 = 2
-                LA12_0 = self.input.LA(1)
-
-                if ((FIX <= LA12_0 <= QUOTE)) :
+                LA12 = self.input.LA(1)
+                if LA12 == FIX or LA12 == REWRITE or LA12 == TEXT or LA12 == QUOTE:
                     alt12 = 1
-                elif ((DISTANCE_FN <= LA12_0 <= GEO_POINT_FN)) :
+                elif LA12 == DISTANCE_FN:
                     LA12_2 = self.input.LA(2)
 
                     if ((WS <= LA12_2 <= HAS)) :
@@ -1246,6 +1241,18 @@
 
                         raise nvae
 
+                elif LA12 == GEO_POINT_FN:
+                    LA12_3 = self.input.LA(2)
+
+                    if ((WS <= LA12_3 <= HAS)) :
+                        alt12 = 1
+                    elif (LA12_3 == LPAREN) :
+                        alt12 = 2
+                    else:
+                        nvae = NoViableAltException("", 12, 3, self.input)
+
+                        raise nvae
+
                 else:
                     nvae = NoViableAltException("", 12, 0, self.input)
 
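
The regenerated lookahead above is behaviorally equivalent to the range tests
it replaces: FIX..QUOTE still selects a plain item, and DISTANCE_FN and
GEO_POINT_FN now each get their own branch running the same second-token
check, taking the function alternative only when a "(" follows. Condensed as
a decision table, with token names from the generated parser but a helper of
this note's own invention:

    # The WS..HAS range from the generated predicate.
    COMPARATOR_LIKE = frozenset(
        ['WS', 'LE', 'LESSTHAN', 'GE', 'GT', 'NE', 'EQ', 'HAS'])

    def classify_comparable(la1, la2):
        if la1 in ('FIX', 'REWRITE', 'TEXT', 'QUOTE'):
            return 'item'
        if la1 in ('DISTANCE_FN', 'GEO_POINT_FN'):
            if la2 in COMPARATOR_LIKE:
                return 'item'      # e.g. "distance < 100": the name is a field
            if la2 == 'LPAREN':
                return 'function'  # e.g. a distance(...) call
            raise ValueError('no viable alternative')
        raise ValueError('no viable alternative')

    assert classify_comparable('GEO_POINT_FN', 'LPAREN') == 'function'
    assert classify_comparable('DISTANCE_FN', 'LESSTHAN') == 'item'
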
@@ -1256,7 +1263,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_item_in_comparable400)
+                    self._state.following.append(self.FOLLOW_item_in_comparable396)
                     item25 = self.item()
 
                     self._state.following.pop()
@@ -1268,7 +1275,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_function_in_comparable406)
+                    self._state.following.append(self.FOLLOW_function_in_comparable402)
                     function26 = self.function()
 
                     self._state.following.pop()
@@ -1330,19 +1337,19 @@
 
 
                 pass
-                self._state.following.append(self.FOLLOW_fnname_in_function421)
+                self._state.following.append(self.FOLLOW_fnname_in_function417)
                 fnname27 = self.fnname()
 
                 self._state.following.pop()
                 stream_fnname.add(fnname27.tree)
-                LPAREN28=self.match(self.input, LPAREN, self.FOLLOW_LPAREN_in_function423)
+                LPAREN28=self.match(self.input, LPAREN, self.FOLLOW_LPAREN_in_function419)
                 stream_LPAREN.add(LPAREN28)
-                self._state.following.append(self.FOLLOW_arglist_in_function425)
+                self._state.following.append(self.FOLLOW_arglist_in_function421)
                 arglist29 = self.arglist()
 
                 self._state.following.pop()
                 stream_arglist.add(arglist29.tree)
-                RPAREN30=self.match(self.input, RPAREN, self.FOLLOW_RPAREN_in_function427)
+                RPAREN30=self.match(self.input, RPAREN, self.FOLLOW_RPAREN_in_function423)
                 stream_RPAREN.add(RPAREN30)
 
 
@@ -1447,7 +1454,7 @@
                 if alt14 == 1:
 
                     pass
-                    self._state.following.append(self.FOLLOW_arg_in_arglist456)
+                    self._state.following.append(self.FOLLOW_arg_in_arglist452)
                     arg31 = self.arg()
 
                     self._state.following.pop()
@@ -1464,12 +1471,12 @@
                         if alt13 == 1:
 
                             pass
-                            self._state.following.append(self.FOLLOW_sep_in_arglist459)
+                            self._state.following.append(self.FOLLOW_sep_in_arglist455)
                             sep32 = self.sep()
 
                             self._state.following.pop()
                             stream_sep.add(sep32.tree)
-                            self._state.following.append(self.FOLLOW_arg_in_arglist461)
+                            self._state.following.append(self.FOLLOW_arg_in_arglist457)
                             arg33 = self.arg()
 
                             self._state.following.pop()
@@ -1568,7 +1575,7 @@
                 LA15 = self.input.LA(1)
                 if LA15 == FIX or LA15 == REWRITE or LA15 == TEXT or LA15 == QUOTE:
                     alt15 = 1
-                elif LA15 == DISTANCE_FN or LA15 == GEO_POINT_FN:
+                elif LA15 == DISTANCE_FN:
                     LA15_2 = self.input.LA(2)
 
                     if (LA15_2 == EOF or LA15_2 == WS or LA15_2 == RPAREN or LA15_2 == 44) :
@@ -1580,6 +1587,18 @@
 
                         raise nvae
 
+                elif LA15 == GEO_POINT_FN:
+                    LA15_3 = self.input.LA(2)
+
+                    if (LA15_3 == EOF or LA15_3 == WS or LA15_3 == RPAREN or LA15_3 == 44) :
+                        alt15 = 1
+                    elif (LA15_3 == LPAREN) :
+                        alt15 = 3
+                    else:
+                        nvae = NoViableAltException("", 15, 3, self.input)
+
+                        raise nvae
+
                 elif LA15 == LPAREN:
                     alt15 = 2
                 else:
@@ -1592,7 +1611,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_item_in_arg486)
+                    self._state.following.append(self.FOLLOW_item_in_arg482)
                     item34 = self.item()
 
                     self._state.following.pop()
@@ -1604,7 +1623,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_composite_in_arg492)
+                    self._state.following.append(self.FOLLOW_composite_in_arg488)
                     composite35 = self.composite()
 
                     self._state.following.pop()
@@ -1616,7 +1635,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    self._state.following.append(self.FOLLOW_function_in_arg498)
+                    self._state.following.append(self.FOLLOW_function_in_arg494)
                     function36 = self.function()
 
                     self._state.following.pop()
@@ -1688,7 +1707,7 @@
                     if alt16 == 1:
 
                         pass
-                        WS37=self.match(self.input, WS, self.FOLLOW_WS_in_andOp512)
+                        WS37=self.match(self.input, WS, self.FOLLOW_WS_in_andOp508)
 
                         WS37_tree = self._adaptor.createWithPayload(WS37)
                         self._adaptor.addChild(root_0, WS37_tree)
@@ -1705,7 +1724,7 @@
                     cnt16 += 1
 
 
-                AND38=self.match(self.input, AND, self.FOLLOW_AND_in_andOp515)
+                AND38=self.match(self.input, AND, self.FOLLOW_AND_in_andOp511)
 
                 AND38_tree = self._adaptor.createWithPayload(AND38)
                 self._adaptor.addChild(root_0, AND38_tree)
@@ -1723,7 +1742,7 @@
                     if alt17 == 1:
 
                         pass
-                        WS39=self.match(self.input, WS, self.FOLLOW_WS_in_andOp517)
+                        WS39=self.match(self.input, WS, self.FOLLOW_WS_in_andOp513)
 
                         WS39_tree = self._adaptor.createWithPayload(WS39)
                         self._adaptor.addChild(root_0, WS39_tree)
@@ -1808,7 +1827,7 @@
                     if alt18 == 1:
 
                         pass
-                        WS40=self.match(self.input, WS, self.FOLLOW_WS_in_orOp532)
+                        WS40=self.match(self.input, WS, self.FOLLOW_WS_in_orOp528)
 
                         WS40_tree = self._adaptor.createWithPayload(WS40)
                         self._adaptor.addChild(root_0, WS40_tree)
@@ -1825,7 +1844,7 @@
                     cnt18 += 1
 
 
-                OR41=self.match(self.input, OR, self.FOLLOW_OR_in_orOp535)
+                OR41=self.match(self.input, OR, self.FOLLOW_OR_in_orOp531)
 
                 OR41_tree = self._adaptor.createWithPayload(OR41)
                 self._adaptor.addChild(root_0, OR41_tree)
@@ -1843,7 +1862,7 @@
                     if alt19 == 1:
 
                         pass
-                        WS42=self.match(self.input, WS, self.FOLLOW_WS_in_orOp537)
+                        WS42=self.match(self.input, WS, self.FOLLOW_WS_in_orOp533)
 
                         WS42_tree = self._adaptor.createWithPayload(WS42)
                         self._adaptor.addChild(root_0, WS42_tree)
@@ -1928,7 +1947,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    char_literal43=self.match(self.input, 43, self.FOLLOW_43_in_notOp552)
+                    char_literal43=self.match(self.input, 43, self.FOLLOW_43_in_notOp548)
 
                     char_literal43_tree = self._adaptor.createWithPayload(char_literal43)
                     self._adaptor.addChild(root_0, char_literal43_tree)
@@ -1940,7 +1959,7 @@
                     pass
                     root_0 = self._adaptor.nil()
 
-                    NOT44=self.match(self.input, NOT, self.FOLLOW_NOT_in_notOp558)
+                    NOT44=self.match(self.input, NOT, self.FOLLOW_NOT_in_notOp554)
 
                     NOT44_tree = self._adaptor.createWithPayload(NOT44)
                     self._adaptor.addChild(root_0, NOT44_tree)
@@ -1958,7 +1977,7 @@
                         if alt20 == 1:
 
                             pass
-                            WS45=self.match(self.input, WS, self.FOLLOW_WS_in_notOp560)
+                            WS45=self.match(self.input, WS, self.FOLLOW_WS_in_notOp556)
 
                             WS45_tree = self._adaptor.createWithPayload(WS45)
                             self._adaptor.addChild(root_0, WS45_tree)
@@ -2041,7 +2060,7 @@
                     if alt22 == 1:
 
                         pass
-                        WS46=self.match(self.input, WS, self.FOLLOW_WS_in_sep575)
+                        WS46=self.match(self.input, WS, self.FOLLOW_WS_in_sep571)
 
                         WS46_tree = self._adaptor.createWithPayload(WS46)
                         self._adaptor.addChild(root_0, WS46_tree)
@@ -2052,7 +2071,7 @@
                         break
 
 
-                char_literal47=self.match(self.input, 44, self.FOLLOW_44_in_sep578)
+                char_literal47=self.match(self.input, 44, self.FOLLOW_44_in_sep574)
 
                 char_literal47_tree = self._adaptor.createWithPayload(char_literal47)
                 self._adaptor.addChild(root_0, char_literal47_tree)
@@ -2069,7 +2088,7 @@
                     if alt23 == 1:
 
                         pass
-                        WS48=self.match(self.input, WS, self.FOLLOW_WS_in_sep580)
+                        WS48=self.match(self.input, WS, self.FOLLOW_WS_in_sep576)
 
                         WS48_tree = self._adaptor.createWithPayload(WS48)
                         self._adaptor.addChild(root_0, WS48_tree)
@@ -2202,7 +2221,7 @@
 
 
                 pass
-                LPAREN50=self.match(self.input, LPAREN, self.FOLLOW_LPAREN_in_composite616)
+                LPAREN50=self.match(self.input, LPAREN, self.FOLLOW_LPAREN_in_composite612)
                 stream_LPAREN.add(LPAREN50)
 
                 while True:
@@ -2216,7 +2235,7 @@
                     if alt24 == 1:
 
                         pass
-                        WS51=self.match(self.input, WS, self.FOLLOW_WS_in_composite618)
+                        WS51=self.match(self.input, WS, self.FOLLOW_WS_in_composite614)
                         stream_WS.add(WS51)
 
 
@@ -2224,7 +2243,7 @@
                         break
 
 
-                self._state.following.append(self.FOLLOW_expression_in_composite621)
+                self._state.following.append(self.FOLLOW_expression_in_composite617)
                 expression52 = self.expression()
 
                 self._state.following.pop()
@@ -2241,7 +2260,7 @@
                     if alt25 == 1:
 
                         pass
-                        WS53=self.match(self.input, WS, self.FOLLOW_WS_in_composite623)
+                        WS53=self.match(self.input, WS, self.FOLLOW_WS_in_composite619)
                         stream_WS.add(WS53)
 
 
@@ -2249,7 +2268,7 @@
                         break
 
 
-                RPAREN54=self.match(self.input, RPAREN, self.FOLLOW_RPAREN_in_composite626)
+                RPAREN54=self.match(self.input, RPAREN, self.FOLLOW_RPAREN_in_composite622)
                 stream_RPAREN.add(RPAREN54)
 
 
@@ -2347,9 +2366,9 @@
                 if alt26 == 1:
 
                     pass
-                    FIX55=self.match(self.input, FIX, self.FOLLOW_FIX_in_item646)
+                    FIX55=self.match(self.input, FIX, self.FOLLOW_FIX_in_item642)
                     stream_FIX.add(FIX55)
-                    self._state.following.append(self.FOLLOW_value_in_item648)
+                    self._state.following.append(self.FOLLOW_value_in_item644)
                     value56 = self.value()
 
                     self._state.following.pop()
@@ -2388,9 +2407,9 @@
                 elif alt26 == 2:
 
                     pass
-                    REWRITE57=self.match(self.input, REWRITE, self.FOLLOW_REWRITE_in_item662)
+                    REWRITE57=self.match(self.input, REWRITE, self.FOLLOW_REWRITE_in_item658)
                     stream_REWRITE.add(REWRITE57)
-                    self._state.following.append(self.FOLLOW_value_in_item664)
+                    self._state.following.append(self.FOLLOW_value_in_item660)
                     value58 = self.value()
 
                     self._state.following.pop()
@@ -2429,7 +2448,7 @@
                 elif alt26 == 3:
 
                     pass
-                    self._state.following.append(self.FOLLOW_value_in_item678)
+                    self._state.following.append(self.FOLLOW_value_in_item674)
                     value59 = self.value()
 
                     self._state.following.pop()
@@ -2521,7 +2540,7 @@
                 if alt27 == 1:
 
                     pass
-                    self._state.following.append(self.FOLLOW_text_in_value696)
+                    self._state.following.append(self.FOLLOW_text_in_value692)
                     text60 = self.text()
 
                     self._state.following.pop()
@@ -2561,7 +2580,7 @@
                 elif alt27 == 2:
 
                     pass
-                    self._state.following.append(self.FOLLOW_phrase_in_value712)
+                    self._state.following.append(self.FOLLOW_phrase_in_value708)
                     phrase61 = self.phrase()
 
                     self._state.following.pop()
@@ -2635,29 +2654,100 @@
 
         root_0 = None
 
-        set62 = None
+        t = None
+        TEXT62 = None
 
-        set62_tree = None
+        t_tree = None
+        TEXT62_tree = None
+        stream_GEO_POINT_FN = RewriteRuleTokenStream(self._adaptor, "token GEO_POINT_FN")
+        stream_DISTANCE_FN = RewriteRuleTokenStream(self._adaptor, "token DISTANCE_FN")
 
         try:
             try:
 
-
-                pass
-                root_0 = self._adaptor.nil()
-
-                set62 = self.input.LT(1)
-                if (DISTANCE_FN <= self.input.LA(1) <= GEO_POINT_FN) or self.input.LA(1) == TEXT:
-                    self.input.consume()
-                    self._adaptor.addChild(root_0, self._adaptor.createWithPayload(set62))
-                    self._state.errorRecovery = False
-
+                alt28 = 3
+                LA28 = self.input.LA(1)
+                if LA28 == TEXT:
+                    alt28 = 1
+                elif LA28 == DISTANCE_FN:
+                    alt28 = 2
+                elif LA28 == GEO_POINT_FN:
+                    alt28 = 3
                 else:
-                    mse = MismatchedSetException(None, self.input)
-                    raise mse
+                    nvae = NoViableAltException("", 28, 0, self.input)
+
+                    raise nvae
+
+                if alt28 == 1:
+
+                    pass
+                    root_0 = self._adaptor.nil()
+
+                    TEXT62=self.match(self.input, TEXT, self.FOLLOW_TEXT_in_text732)
+
+                    TEXT62_tree = self._adaptor.createWithPayload(TEXT62)
+                    self._adaptor.addChild(root_0, TEXT62_tree)
 
 
 
+                elif alt28 == 2:
+
+                    pass
+                    t=self.match(self.input, DISTANCE_FN, self.FOLLOW_DISTANCE_FN_in_text743)
+                    stream_DISTANCE_FN.add(t)
+
+
+
+
+
+
+
+
+                    retval.tree = root_0
+
+                    if retval is not None:
+                        stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", retval.tree)
+                    else:
+                        stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+                    root_0 = self._adaptor.nil()
+
+                    self._adaptor.addChild(root_0, self._adaptor.create(TEXT, t))
+
+
+
+                    retval.tree = root_0
+
+
+                elif alt28 == 3:
+
+                    pass
+                    t=self.match(self.input, GEO_POINT_FN, self.FOLLOW_GEO_POINT_FN_in_text756)
+                    stream_GEO_POINT_FN.add(t)
+
+
+
+
+
+
+
+
+                    retval.tree = root_0
+
+                    if retval is not None:
+                        stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", retval.tree)
+                    else:
+                        stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+                    root_0 = self._adaptor.nil()
+
+                    self._adaptor.addChild(root_0, self._adaptor.create(TEXT, t))
+
+
+
+                    retval.tree = root_0
 
 
                 retval.stop = self.input.LT(-1)
@@ -2712,21 +2802,21 @@
                 pass
                 root_0 = self._adaptor.nil()
 
-                QUOTE63=self.match(self.input, QUOTE, self.FOLLOW_QUOTE_in_phrase762)
+                QUOTE63=self.match(self.input, QUOTE, self.FOLLOW_QUOTE_in_phrase775)
 
                 QUOTE63_tree = self._adaptor.createWithPayload(QUOTE63)
                 self._adaptor.addChild(root_0, QUOTE63_tree)
 
 
                 while True:
-                    alt28 = 2
-                    LA28_0 = self.input.LA(1)
+                    alt29 = 2
+                    LA29_0 = self.input.LA(1)
 
-                    if ((ARGS <= LA28_0 <= TEXT) or (ESC <= LA28_0 <= 44)) :
-                        alt28 = 1
+                    if ((ARGS <= LA29_0 <= TEXT) or (ESC <= LA29_0 <= 44)) :
+                        alt29 = 1
 
 
-                    if alt28 == 1:
+                    if alt29 == 1:
 
                         pass
                         set64 = self.input.LT(1)
@@ -2746,7 +2836,7 @@
                         break
 
 
-                QUOTE65=self.match(self.input, QUOTE, self.FOLLOW_QUOTE_in_phrase780)
+                QUOTE65=self.match(self.input, QUOTE, self.FOLLOW_QUOTE_in_phrase793)
 
                 QUOTE65_tree = self._adaptor.createWithPayload(QUOTE65)
                 self._adaptor.addChild(root_0, QUOTE65_tree)
@@ -2891,54 +2981,59 @@
 
 
     DFA8_eot = DFA.unpack(
-        u"\24\uffff"
+        u"\31\uffff"
         )
 
     DFA8_eof = DFA.unpack(
-        u"\3\uffff\1\14\1\uffff\1\14\1\uffff\1\14\1\uffff\1\14\1\uffff\1"
-        u"\14\3\uffff\1\14\1\uffff\1\14\1\uffff\1\14"
+        u"\3\uffff\3\21\2\uffff\3\21\1\uffff\3\21\1\uffff\1\21\3\uffff\1"
+        u"\21\1\uffff\1\21\1\uffff\1\21"
         )
 
     DFA8_min = DFA.unpack(
-        u"\1\27\2\34\1\17\1\4\1\17\1\uffff\1\17\1\4\1\17\1\4\1\17\2\uffff"
-        u"\1\4\1\17\1\4\1\17\1\4\1\17"
+        u"\1\27\2\34\3\17\1\4\1\uffff\3\17\1\4\3\17\1\4\1\17\2\uffff\1\4"
+        u"\1\17\1\4\1\17\1\4\1\17"
         )
 
     DFA8_max = DFA.unpack(
-        u"\3\41\1\30\1\54\1\30\1\uffff\1\30\1\54\1\30\1\54\1\53\2\uffff\1"
-        u"\54\1\30\1\54\1\30\1\54\1\30"
+        u"\3\41\3\30\1\54\1\uffff\3\30\1\54\3\30\1\54\1\53\2\uffff\1\54\1"
+        u"\30\1\54\1\30\1\54\1\30"
         )
 
     DFA8_accept = DFA.unpack(
-        u"\6\uffff\1\2\5\uffff\1\3\1\1\6\uffff"
+        u"\7\uffff\1\2\11\uffff\1\3\1\1\6\uffff"
         )
 
     DFA8_special = DFA.unpack(
-        u"\24\uffff"
+        u"\31\uffff"
         )
 
 
     DFA8_transition = [
-        DFA.unpack(u"\1\6\4\uffff\2\3\1\1\1\2\1\5\1\4"),
-        DFA.unpack(u"\2\7\2\uffff\1\7\1\10"),
-        DFA.unpack(u"\2\11\2\uffff\1\11\1\12"),
-        DFA.unpack(u"\1\13\10\15\1\14"),
-        DFA.unpack(u"\35\16\1\17\13\16"),
-        DFA.unpack(u"\1\13\7\15\1\uffff\1\14"),
+        DFA.unpack(u"\1\7\4\uffff\1\4\1\5\1\1\1\2\1\3\1\6"),
+        DFA.unpack(u"\1\11\1\12\2\uffff\1\10\1\13"),
+        DFA.unpack(u"\1\15\1\16\2\uffff\1\14\1\17"),
+        DFA.unpack(u"\1\20\7\22\1\uffff\1\21"),
+        DFA.unpack(u"\1\20\10\22\1\21"),
+        DFA.unpack(u"\1\20\10\22\1\21"),
+        DFA.unpack(u"\35\23\1\24\13\23"),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\13\7\15\1\uffff\1\14"),
-        DFA.unpack(u"\35\20\1\21\13\20"),
-        DFA.unpack(u"\1\13\7\15\1\uffff\1\14"),
-        DFA.unpack(u"\35\22\1\23\13\22"),
-        DFA.unpack(u"\1\13\7\15\13\14\11\uffff\1\14"),
+        DFA.unpack(u"\1\20\7\22\1\uffff\1\21"),
+        DFA.unpack(u"\1\20\7\22\1\uffff\1\21"),
+        DFA.unpack(u"\1\20\7\22\1\uffff\1\21"),
+        DFA.unpack(u"\35\25\1\26\13\25"),
+        DFA.unpack(u"\1\20\7\22\1\uffff\1\21"),
+        DFA.unpack(u"\1\20\7\22\1\uffff\1\21"),
+        DFA.unpack(u"\1\20\7\22\1\uffff\1\21"),
+        DFA.unpack(u"\35\27\1\30\13\27"),
+        DFA.unpack(u"\1\20\7\22\13\21\11\uffff\1\21"),
         DFA.unpack(u""),
         DFA.unpack(u""),
-        DFA.unpack(u"\35\16\1\17\13\16"),
-        DFA.unpack(u"\1\13\7\15\1\uffff\1\14"),
-        DFA.unpack(u"\35\20\1\21\13\20"),
-        DFA.unpack(u"\1\13\7\15\1\uffff\1\14"),
-        DFA.unpack(u"\35\22\1\23\13\22"),
-        DFA.unpack(u"\1\13\7\15\1\uffff\1\14")
+        DFA.unpack(u"\35\23\1\24\13\23"),
+        DFA.unpack(u"\1\20\7\22\1\uffff\1\21"),
+        DFA.unpack(u"\35\25\1\26\13\25"),
+        DFA.unpack(u"\1\20\7\22\1\uffff\1\21"),
+        DFA.unpack(u"\35\27\1\30\13\27"),
+        DFA.unpack(u"\1\20\7\22\1\uffff\1\21")
     ]
 
 
@@ -2946,78 +3041,80 @@
     DFA8 = DFA
 
 
-    FOLLOW_WS_in_query116 = frozenset([15, 23, 27, 28, 29, 30, 31, 32, 33, 43])
-    FOLLOW_expression_in_query119 = frozenset([15])
-    FOLLOW_WS_in_query121 = frozenset([15])
-    FOLLOW_EOF_in_query124 = frozenset([1])
-    FOLLOW_sequence_in_expression143 = frozenset([1, 15])
-    FOLLOW_andOp_in_expression146 = frozenset([23, 27, 28, 29, 30, 31, 32, 33, 43])
-    FOLLOW_sequence_in_expression148 = frozenset([1, 15])
-    FOLLOW_factor_in_sequence174 = frozenset([1, 15])
-    FOLLOW_WS_in_sequence177 = frozenset([15, 23, 27, 28, 29, 30, 31, 32, 33, 43])
-    FOLLOW_factor_in_sequence180 = frozenset([1, 15])
-    FOLLOW_term_in_factor206 = frozenset([1, 15])
-    FOLLOW_orOp_in_factor209 = frozenset([23, 27, 28, 29, 30, 31, 32, 33, 43])
-    FOLLOW_term_in_factor211 = frozenset([1, 15])
-    FOLLOW_notOp_in_term235 = frozenset([23, 27, 28, 29, 30, 31, 32, 33, 43])
-    FOLLOW_primitive_in_term237 = frozenset([1])
-    FOLLOW_primitive_in_term251 = frozenset([1])
-    FOLLOW_restrict_in_primitive267 = frozenset([1])
-    FOLLOW_composite_in_primitive273 = frozenset([1])
-    FOLLOW_item_in_primitive279 = frozenset([1])
-    FOLLOW_comparable_in_restrict305 = frozenset([15, 16, 17, 18, 19, 20, 21, 22])
-    FOLLOW_comparator_in_restrict307 = frozenset([23, 28, 29, 30, 31, 32, 33])
-    FOLLOW_arg_in_restrict309 = frozenset([1])
-    FOLLOW_WS_in_comparator333 = frozenset([15, 16, 17, 18, 19, 20, 21, 22])
-    FOLLOW_LE_in_comparator339 = frozenset([1, 15])
-    FOLLOW_LT_in_comparator345 = frozenset([1, 15])
-    FOLLOW_GE_in_comparator351 = frozenset([1, 15])
-    FOLLOW_GT_in_comparator357 = frozenset([1, 15])
-    FOLLOW_NE_in_comparator363 = frozenset([1, 15])
-    FOLLOW_EQ_in_comparator369 = frozenset([1, 15])
-    FOLLOW_HAS_in_comparator375 = frozenset([1, 15])
-    FOLLOW_WS_in_comparator378 = frozenset([1, 15])
-    FOLLOW_item_in_comparable400 = frozenset([1])
-    FOLLOW_function_in_comparable406 = frozenset([1])
-    FOLLOW_fnname_in_function421 = frozenset([23])
-    FOLLOW_LPAREN_in_function423 = frozenset([23, 24, 28, 29, 30, 31, 32, 33])
-    FOLLOW_arglist_in_function425 = frozenset([24])
-    FOLLOW_RPAREN_in_function427 = frozenset([1])
-    FOLLOW_arg_in_arglist456 = frozenset([1, 15, 44])
-    FOLLOW_sep_in_arglist459 = frozenset([23, 28, 29, 30, 31, 32, 33])
-    FOLLOW_arg_in_arglist461 = frozenset([1, 15, 44])
-    FOLLOW_item_in_arg486 = frozenset([1])
-    FOLLOW_composite_in_arg492 = frozenset([1])
-    FOLLOW_function_in_arg498 = frozenset([1])
-    FOLLOW_WS_in_andOp512 = frozenset([15, 25])
-    FOLLOW_AND_in_andOp515 = frozenset([15])
-    FOLLOW_WS_in_andOp517 = frozenset([1, 15])
-    FOLLOW_WS_in_orOp532 = frozenset([15, 26])
-    FOLLOW_OR_in_orOp535 = frozenset([15])
-    FOLLOW_WS_in_orOp537 = frozenset([1, 15])
-    FOLLOW_43_in_notOp552 = frozenset([1])
-    FOLLOW_NOT_in_notOp558 = frozenset([15])
-    FOLLOW_WS_in_notOp560 = frozenset([1, 15])
-    FOLLOW_WS_in_sep575 = frozenset([15, 44])
-    FOLLOW_44_in_sep578 = frozenset([1, 15])
-    FOLLOW_WS_in_sep580 = frozenset([1, 15])
+    FOLLOW_WS_in_query112 = frozenset([15, 23, 27, 28, 29, 30, 31, 32, 33, 43])
+    FOLLOW_expression_in_query115 = frozenset([15])
+    FOLLOW_WS_in_query117 = frozenset([15])
+    FOLLOW_EOF_in_query120 = frozenset([1])
+    FOLLOW_sequence_in_expression139 = frozenset([1, 15])
+    FOLLOW_andOp_in_expression142 = frozenset([23, 27, 28, 29, 30, 31, 32, 33, 43])
+    FOLLOW_sequence_in_expression144 = frozenset([1, 15])
+    FOLLOW_factor_in_sequence170 = frozenset([1, 15])
+    FOLLOW_WS_in_sequence173 = frozenset([15, 23, 27, 28, 29, 30, 31, 32, 33, 43])
+    FOLLOW_factor_in_sequence176 = frozenset([1, 15])
+    FOLLOW_term_in_factor202 = frozenset([1, 15])
+    FOLLOW_orOp_in_factor205 = frozenset([23, 27, 28, 29, 30, 31, 32, 33, 43])
+    FOLLOW_term_in_factor207 = frozenset([1, 15])
+    FOLLOW_notOp_in_term231 = frozenset([23, 27, 28, 29, 30, 31, 32, 33, 43])
+    FOLLOW_primitive_in_term233 = frozenset([1])
+    FOLLOW_primitive_in_term247 = frozenset([1])
+    FOLLOW_restriction_in_primitive263 = frozenset([1])
+    FOLLOW_composite_in_primitive269 = frozenset([1])
+    FOLLOW_item_in_primitive275 = frozenset([1])
+    FOLLOW_comparable_in_restriction301 = frozenset([15, 16, 17, 18, 19, 20, 21, 22])
+    FOLLOW_comparator_in_restriction303 = frozenset([23, 28, 29, 30, 31, 32, 33])
+    FOLLOW_arg_in_restriction305 = frozenset([1])
+    FOLLOW_WS_in_comparator329 = frozenset([15, 16, 17, 18, 19, 20, 21, 22])
+    FOLLOW_LE_in_comparator335 = frozenset([1, 15])
+    FOLLOW_LESSTHAN_in_comparator341 = frozenset([1, 15])
+    FOLLOW_GE_in_comparator347 = frozenset([1, 15])
+    FOLLOW_GT_in_comparator353 = frozenset([1, 15])
+    FOLLOW_NE_in_comparator359 = frozenset([1, 15])
+    FOLLOW_EQ_in_comparator365 = frozenset([1, 15])
+    FOLLOW_HAS_in_comparator371 = frozenset([1, 15])
+    FOLLOW_WS_in_comparator374 = frozenset([1, 15])
+    FOLLOW_item_in_comparable396 = frozenset([1])
+    FOLLOW_function_in_comparable402 = frozenset([1])
+    FOLLOW_fnname_in_function417 = frozenset([23])
+    FOLLOW_LPAREN_in_function419 = frozenset([23, 24, 28, 29, 30, 31, 32, 33])
+    FOLLOW_arglist_in_function421 = frozenset([24])
+    FOLLOW_RPAREN_in_function423 = frozenset([1])
+    FOLLOW_arg_in_arglist452 = frozenset([1, 15, 44])
+    FOLLOW_sep_in_arglist455 = frozenset([23, 28, 29, 30, 31, 32, 33])
+    FOLLOW_arg_in_arglist457 = frozenset([1, 15, 44])
+    FOLLOW_item_in_arg482 = frozenset([1])
+    FOLLOW_composite_in_arg488 = frozenset([1])
+    FOLLOW_function_in_arg494 = frozenset([1])
+    FOLLOW_WS_in_andOp508 = frozenset([15, 25])
+    FOLLOW_AND_in_andOp511 = frozenset([15])
+    FOLLOW_WS_in_andOp513 = frozenset([1, 15])
+    FOLLOW_WS_in_orOp528 = frozenset([15, 26])
+    FOLLOW_OR_in_orOp531 = frozenset([15])
+    FOLLOW_WS_in_orOp533 = frozenset([1, 15])
+    FOLLOW_43_in_notOp548 = frozenset([1])
+    FOLLOW_NOT_in_notOp554 = frozenset([15])
+    FOLLOW_WS_in_notOp556 = frozenset([1, 15])
+    FOLLOW_WS_in_sep571 = frozenset([15, 44])
+    FOLLOW_44_in_sep574 = frozenset([1, 15])
+    FOLLOW_WS_in_sep576 = frozenset([1, 15])
     FOLLOW_set_in_fnname0 = frozenset([1])
-    FOLLOW_LPAREN_in_composite616 = frozenset([15, 23, 27, 28, 29, 30, 31, 32, 33, 43])
-    FOLLOW_WS_in_composite618 = frozenset([15, 23, 27, 28, 29, 30, 31, 32, 33, 43])
-    FOLLOW_expression_in_composite621 = frozenset([15, 24])
-    FOLLOW_WS_in_composite623 = frozenset([15, 24])
-    FOLLOW_RPAREN_in_composite626 = frozenset([1])
-    FOLLOW_FIX_in_item646 = frozenset([28, 29, 30, 31, 32, 33])
-    FOLLOW_value_in_item648 = frozenset([1])
-    FOLLOW_REWRITE_in_item662 = frozenset([28, 29, 30, 31, 32, 33])
-    FOLLOW_value_in_item664 = frozenset([1])
-    FOLLOW_value_in_item678 = frozenset([1])
-    FOLLOW_text_in_value696 = frozenset([1])
-    FOLLOW_phrase_in_value712 = frozenset([1])
-    FOLLOW_set_in_text0 = frozenset([1])
-    FOLLOW_QUOTE_in_phrase762 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44])
-    FOLLOW_set_in_phrase764 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44])
-    FOLLOW_QUOTE_in_phrase780 = frozenset([1])
+    FOLLOW_LPAREN_in_composite612 = frozenset([15, 23, 27, 28, 29, 30, 31, 32, 33, 43])
+    FOLLOW_WS_in_composite614 = frozenset([15, 23, 27, 28, 29, 30, 31, 32, 33, 43])
+    FOLLOW_expression_in_composite617 = frozenset([15, 24])
+    FOLLOW_WS_in_composite619 = frozenset([15, 24])
+    FOLLOW_RPAREN_in_composite622 = frozenset([1])
+    FOLLOW_FIX_in_item642 = frozenset([28, 29, 30, 31, 32, 33])
+    FOLLOW_value_in_item644 = frozenset([1])
+    FOLLOW_REWRITE_in_item658 = frozenset([28, 29, 30, 31, 32, 33])
+    FOLLOW_value_in_item660 = frozenset([1])
+    FOLLOW_value_in_item674 = frozenset([1])
+    FOLLOW_text_in_value692 = frozenset([1])
+    FOLLOW_phrase_in_value708 = frozenset([1])
+    FOLLOW_TEXT_in_text732 = frozenset([1])
+    FOLLOW_DISTANCE_FN_in_text743 = frozenset([1])
+    FOLLOW_GEO_POINT_FN_in_text756 = frozenset([1])
+    FOLLOW_QUOTE_in_phrase775 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44])
+    FOLLOW_set_in_phrase777 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44])
+    FOLLOW_QUOTE_in_phrase793 = frozenset([1])
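
Note on the regenerated text rule above: DISTANCE_FN and GEO_POINT_FN were
already accepted wherever plain text is allowed, but the parser now rewrites
both tokens into TEXT nodes instead of preserving their own token types, so
downstream consumers see the function names as ordinary terms. A minimal
sketch against the public API (query strings are illustrative):

    from google.appengine.api import search

    # 'distance' and 'geopoint' keep working in function position...
    q1 = search.Query(
        query_string='distance(location, geopoint(35.0, -97.0)) < 5000')
    # ...and, per the rewritten rule, also parse as ordinary search terms.
    q2 = search.Query(query_string='geopoint')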
 
 
 
diff --git a/google/appengine/api/search/query_parser.py b/google/appengine/api/search/query_parser.py
index 3860961..f20d803 100644
--- a/google/appengine/api/search/query_parser.py
+++ b/google/appengine/api/search/query_parser.py
@@ -16,11 +16,8 @@
 #
 
 
-
-
 """Wrapper for QueryParser."""
 
-
 from google.appengine._internal import antlr3
 from google.appengine._internal.antlr3 import tree
 from google.appengine.api.search import QueryLexer
@@ -32,7 +29,7 @@
     QueryParser.NE,
     QueryParser.GT,
     QueryParser.GE,
-    QueryParser.LT,
+    QueryParser.LESSTHAN,
     QueryParser.LE,
     ]
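
The wrapper now tracks the regenerated lexer: the token for '<' is exposed as
LESSTHAN rather than LT, so any code that referenced QueryParser.LT directly
needs the same rename. Illustrative:

    from google.appengine.api.search import QueryParser

    # The '<' comparator token constant after the rename; QueryParser.LT is
    # gone from the regenerated module.
    LESS_THAN = QueryParser.LESSTHAN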
 
diff --git a/google/appengine/api/search/search.py b/google/appengine/api/search/search.py
index 4017439..a7adb64 100644
--- a/google/appengine/api/search/search.py
+++ b/google/appengine/api/search/search.py
@@ -49,6 +49,7 @@
 
 __all__ = [
     'AtomField',
+    'ConcurrentTransactionError',
     'Cursor',
     'DateField',
     'DeleteError',
@@ -103,6 +104,7 @@
     'SortExpression',
     'SortOptions',
     'TextField',
+    'Timeout',
     'TIMESTAMP_FIELD_NAME',
     'TransientError',
     ]
@@ -188,6 +190,14 @@
   """An error occurred while parsing an expression input string."""
 
 
+class Timeout(Error):
+  """Indicates a call on the search API could not finish before its deadline."""
+
+
+class ConcurrentTransactionError(Error):
+  """Indicates a call on the search API failed due to concurrent updates."""
+
+
 def _ConvertToUnicode(some_string):
   """Convert UTF-8 encoded string to unicode."""
   if some_string is None:
@@ -210,10 +220,13 @@
   This is an abstract class.
   """
 
-  OK, INVALID_REQUEST, TRANSIENT_ERROR, INTERNAL_ERROR = (
-      'OK', 'INVALID_REQUEST', 'TRANSIENT_ERROR', 'INTERNAL_ERROR')
+  (OK, INVALID_REQUEST, TRANSIENT_ERROR, INTERNAL_ERROR,
+   TIMEOUT, CONCURRENT_TRANSACTION) = (
+      'OK', 'INVALID_REQUEST', 'TRANSIENT_ERROR', 'INTERNAL_ERROR',
+      'TIMEOUT', 'CONCURRENT_TRANSACTION')
 
-  _CODES = frozenset([OK, INVALID_REQUEST, TRANSIENT_ERROR, INTERNAL_ERROR])
+  _CODES = frozenset([OK, INVALID_REQUEST, TRANSIENT_ERROR, INTERNAL_ERROR,
+                      TIMEOUT, CONCURRENT_TRANSACTION])
 
   def __init__(self, code, message=None, id=None):
     """Initializer.
@@ -261,7 +274,11 @@
     search_service_pb.SearchServiceError.TRANSIENT_ERROR:
     OperationResult.TRANSIENT_ERROR,
     search_service_pb.SearchServiceError.INTERNAL_ERROR:
-    OperationResult.INTERNAL_ERROR
+    OperationResult.INTERNAL_ERROR,
+    search_service_pb.SearchServiceError.TIMEOUT:
+    OperationResult.TIMEOUT,
+    search_service_pb.SearchServiceError.CONCURRENT_TRANSACTION:
+    OperationResult.CONCURRENT_TRANSACTION,
     }
 
 
@@ -318,7 +335,10 @@
 _ERROR_MAP = {
     search_service_pb.SearchServiceError.INVALID_REQUEST: InvalidRequest,
     search_service_pb.SearchServiceError.TRANSIENT_ERROR: TransientError,
-    search_service_pb.SearchServiceError.INTERNAL_ERROR: InternalError
+    search_service_pb.SearchServiceError.INTERNAL_ERROR: InternalError,
+    search_service_pb.SearchServiceError.TIMEOUT: Timeout,
+    search_service_pb.SearchServiceError.CONCURRENT_TRANSACTION:
+    ConcurrentTransactionError,
     }
 
 
@@ -2414,7 +2434,8 @@
     message = None
     if status_pb.has_error_detail():
       message = _DecodeUTF8(status_pb.error_detail())
-    code = _ERROR_OPERATION_CODE_MAP[status_pb.code()]
+    code = _ERROR_OPERATION_CODE_MAP.get(status_pb.code(),
+                                         OperationResult.INTERNAL_ERROR)
     return PutResult(code=code, message=message, id=_DecodeUTF8(doc_id))
 
   def _NewPutResultList(self, response):
@@ -2503,7 +2524,8 @@
     message = None
     if status_pb.has_error_detail():
       message = _DecodeUTF8(status_pb.error_detail())
-    code = _ERROR_OPERATION_CODE_MAP[status_pb.code()]
+    code = _ERROR_OPERATION_CODE_MAP.get(status_pb.code(),
+                                         OperationResult.INTERNAL_ERROR)
 
     return DeleteResult(code=code, message=message, id=doc_id)
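
The two error classes added above give the TIMEOUT and CONCURRENT_TRANSACTION
service codes catchable client-side exceptions, and the switch to .get() with
an INTERNAL_ERROR fallback means an unrecognized status code now degrades
gracefully instead of raising a KeyError. A minimal sketch of handling the new
exceptions (index name and document are illustrative):

    from google.appengine.api import search

    index = search.Index(name='products')  # illustrative index name
    doc = search.Document(
        fields=[search.TextField(name='title', value='widget')])
    try:
      index.put(doc)
    except search.Timeout:
      # The call could not finish before its deadline; retrying is reasonable.
      pass
    except search.ConcurrentTransactionError:
      # A concurrent update to the index won the race; retry or surface it.
      pass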
 
diff --git a/google/appengine/api/search/search_service_pb.py b/google/appengine/api/search/search_service_pb.py
index a715f47..5fc8266 100644
--- a/google/appengine/api/search/search_service_pb.py
+++ b/google/appengine/api/search/search_service_pb.py
@@ -41,6 +41,8 @@
   TRANSIENT_ERROR =    2
   INTERNAL_ERROR =    3
   PERMISSION_DENIED =    4
+  TIMEOUT =    5
+  CONCURRENT_TRANSACTION =    6
 
   _ErrorCode_NAMES = {
     0: "OK",
@@ -48,6 +50,8 @@
     2: "TRANSIENT_ERROR",
     3: "INTERNAL_ERROR",
     4: "PERMISSION_DENIED",
+    5: "TIMEOUT",
+    6: "CONCURRENT_TRANSACTION",
   }
 
   def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
diff --git a/google/appengine/api/search/simple_search_stub.py b/google/appengine/api/search/simple_search_stub.py
index 5784d95..9bb25e8 100644
--- a/google/appengine/api/search/simple_search_stub.py
+++ b/google/appengine/api/search/simple_search_stub.py
@@ -914,6 +914,10 @@
       self._InvalidRequest(response.mutable_status(), e)
       response.set_matched_count(0)
       return
+    except document_matcher.ExpressionTreeException, e:
+      self._InvalidRequest(response.mutable_status(), e)
+      response.set_matched_count(0)
+      return
     response.set_matched_count(len(results))
 
     offset = 0
diff --git a/google/appengine/api/search/stub/document_matcher.py b/google/appengine/api/search/stub/document_matcher.py
index 1ebce2a..4bd2ddc 100644
--- a/google/appengine/api/search/stub/document_matcher.py
+++ b/google/appengine/api/search/stub/document_matcher.py
@@ -16,16 +16,12 @@
 #
 
 
-
-
 """Document matcher for Full Text Search API stub.
 
 DocumentMatcher provides an approximation of the Full Text Search API's query
 matching.
 """
 
-
-
 import logging
 
 from google.appengine.datastore import document_pb
@@ -38,6 +34,13 @@
 from google.appengine.api.search.stub import tokens
 
 
+class ExpressionTreeException(Exception):
+  """An error occurred while analyzing/translating the expression parse tree."""
+
+  def __init__(self, msg):
+    Exception.__init__(self, msg)
+
+
 class DocumentMatcher(object):
   """A class to match documents with a query."""
 
@@ -173,8 +176,8 @@
       self, field, match, cast_to_type, op, document):
     """A generic method to test matching for comparable types.
 
-    Comparable types are defined to be anything that supports <, >, <=, >=, ==
-    and !=. For our purposes, this is numbers and dates.
+    Comparable types are defined to be anything that supports <, >, <=, >=, ==.
+    For our purposes, this is numbers and dates.
 
     Args:
       field: The document_pb.Field to test
@@ -189,6 +192,7 @@
     Raises:
       UnsupportedOnDevError: Raised when an unsupported operator is used, or
       when the query node is of the wrong type.
+      ExpressionTreeException: Raised when the != inequality operator is used.
     """
 
     field_val = cast_to_type(field.value().string_value())
@@ -204,12 +208,12 @@
     if op == QueryParser.EQ:
       return field_val == match_val
     if op == QueryParser.NE:
-      return field_val != match_val
+      raise ExpressionTreeException('!= comparison operator is not available')
     if op == QueryParser.GT:
       return field_val > match_val
     if op == QueryParser.GE:
       return field_val >= match_val
-    if op == QueryParser.LT:
+    if op == QueryParser.LESSTHAN:
       return field_val < match_val
     if op == QueryParser.LE:
       return field_val <= match_val
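
Combined with the new except clause in simple_search_stub.py above, a '!='
comparison in the dev_appserver now fails as an invalid request instead of
being silently evaluated. A sketch of the observable behavior (index and
field names illustrative):

    from google.appengine.api import search

    index = search.Index(name='products')  # illustrative
    try:
      index.search('price != 10')
    except search.InvalidRequest:
      # The stub now reports that the != operator is not available.
      pass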
diff --git a/google/appengine/api/search/stub/expression_evaluator.py b/google/appengine/api/search/stub/expression_evaluator.py
index 316b33e..ad9180e 100644
--- a/google/appengine/api/search/stub/expression_evaluator.py
+++ b/google/appengine/api/search/stub/expression_evaluator.py
@@ -106,6 +106,15 @@
     return max(self._Eval(node) for node in nodes)
 
   def _Count(self, node):
+
+
+
+
+
+
+    if node.getType() != ExpressionParser.NAME:
+      raise _ExpressionError(
+          'The argument to count() must be a simple field name')
     return search_util.GetFieldCountInDocument(
         self._doc_pb, query_parser.GetQueryNodeText(node))
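
The added guard makes the stub enforce that count() takes a bare field name
and nothing else. A sketch using the public expression surface (field and
expression names illustrative; the rejection happens when the expression is
evaluated during a search, not at construction):

    from google.appengine.api import search

    # Accepted: count() over a simple field name.
    ok = search.FieldExpression(name='tag_count', expression='count(tag)')

    # Rejected by the stub once evaluated: any argument that is not a plain
    # field name, e.g. a computed expression.
    bad = search.FieldExpression(name='n', expression='count(tag + 1)')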
 
diff --git a/google/appengine/api/taskqueue/taskqueue_stub.py b/google/appengine/api/taskqueue/taskqueue_stub.py
index fcb248c..b017662 100644
--- a/google/appengine/api/taskqueue/taskqueue_stub.py
+++ b/google/appengine/api/taskqueue/taskqueue_stub.py
@@ -1852,7 +1852,7 @@
                     str(_UsecToSec(task.eta_usec()))))
     headers.append(('X-AppEngine-Fake-Is-Admin', '1'))
     headers.append(('Content-Length', str(len(task.body()))))
-    if 'content-type' not in header_dict:
+    if task.has_body() and 'content-type' not in header_dict:
       headers.append(('Content-Type', 'application/octet-stream'))
     headers.append(('X-AppEngine-TaskExecutionCount',
                     str(task.execution_count())))
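
The extra has_body() check stops the dev_appserver stub from inventing a
default Content-Type header for tasks that carry no payload. Sketch (handler
URL illustrative):

    from google.appengine.api import taskqueue

    # A GET task has no body; the stub no longer adds a default
    # 'Content-Type: application/octet-stream' header when executing it.
    taskqueue.add(url='/work', method='GET')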
diff --git a/google/appengine/base/memcache_sharding_strategy_pb.py b/google/appengine/base/memcache_sharding_strategy_pb.py
deleted file mode 100644
index ae5ec13..0000000
--- a/google/appengine/base/memcache_sharding_strategy_pb.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2007 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-from google.net.proto import ProtocolBuffer
-import array
-import dummy_thread as thread
-
-__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
-                   unusednames=printElemNumber,debug_strs no-special"""
-
-if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
-  _extension_runtime = True
-  _ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
-else:
-  _extension_runtime = False
-  _ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
-
-class MemcacheShardingStrategy(ProtocolBuffer.ProtocolMessage):
-
-
-  NAMESPACE_OR_KEY =    0
-  NAMESPACE_AND_KEY =    1
-
-  _Enum_NAMES = {
-    0: "NAMESPACE_OR_KEY",
-    1: "NAMESPACE_AND_KEY",
-  }
-
-  def Enum_Name(cls, x): return cls._Enum_NAMES.get(x, "")
-  Enum_Name = classmethod(Enum_Name)
-
-
-  def __init__(self, contents=None):
-    pass
-    if contents is not None: self.MergeFromString(contents)
-
-
-  def MergeFrom(self, x):
-    assert x is not self
-
-  def Equals(self, x):
-    if x is self: return 1
-    return 1
-
-  def IsInitialized(self, debug_strs=None):
-    initialized = 1
-    return initialized
-
-  def ByteSize(self):
-    n = 0
-    return n
-
-  def ByteSizePartial(self):
-    n = 0
-    return n
-
-  def Clear(self):
-    pass
-
-  def OutputUnchecked(self, out):
-    pass
-
-  def OutputPartial(self, out):
-    pass
-
-  def TryMerge(self, d):
-    while d.avail() > 0:
-      tt = d.getVarInt32()
-
-
-      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
-      d.skipData(tt)
-
-
-  def __str__(self, prefix="", printElemNumber=0):
-    res=""
-    return res
-
-
-  def _BuildTagLookupTable(sparse, maxtag, default=None):
-    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-
-  _TEXT = _BuildTagLookupTable({
-    0: "ErrorCode",
-  }, 0)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
-  _STYLE = """"""
-  _STYLE_CONTENT_TYPE = """"""
-  _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheShardingStrategy'
-if _extension_runtime:
-  pass
-
-__all__ = ['MemcacheShardingStrategy']
diff --git a/google/appengine/ext/analytics/static/analytics_js.js b/google/appengine/ext/analytics/static/analytics_js.js
index d2033c3..05cb1dd 100644
--- a/google/appengine/ext/analytics/static/analytics_js.js
+++ b/google/appengine/ext/analytics/static/analytics_js.js
@@ -1,24 +1,23 @@
-/* Copyright 2008-9 Google Inc. All Rights Reserved. */ (function(){var n,p=this,r=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";
-else if("function"==b&&"undefined"==typeof a.call)return"object";return b},s=function(a){return"string"==typeof a},t="closure_uid_"+(1E9*Math.random()>>>0),u=0,w=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=Array.prototype.slice.call(arguments);b.unshift.apply(b,c);return a.apply(this,b)}},x=function(a,b){var c=a.split("."),e=p;c[0]in e||!e.execScript||e.execScript("var "+c[0]);for(var d;c.length&&(d=c.shift());)c.length||void 0===b?e=e[d]?e[d]:e[d]={}:e[d]=
-b},y=function(a,b){function c(){}c.prototype=b.prototype;a.m=b.prototype;a.prototype=new c};var z=function(a){Error.captureStackTrace?Error.captureStackTrace(this,z):this.stack=Error().stack||"";a&&(this.message=String(a))};y(z,Error);var aa=function(a,b){for(var c=a.split("%s"),e="",d=Array.prototype.slice.call(arguments,1);d.length&&1<c.length;)e+=c.shift()+d.shift();return e+c.join("%s")};var A=function(a,b){b.unshift(a);z.call(this,aa.apply(null,b));b.shift()};y(A,z);var B=function(a,b,c){if(!a){var e=Array.prototype.slice.call(arguments,2),d="Assertion failed";if(b)var d=d+(": "+b),f=e;throw new A(""+d,f||[]);}};var C=Array.prototype,D=C.indexOf?function(a,b,c){B(null!=a.length);return C.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(s(a))return s(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},ba=C.forEach?function(a,b,c){B(null!=a.length);C.forEach.call(a,b,c)}:function(a,b,c){for(var e=a.length,d=s(a)?a.split(""):a,f=0;f<e;f++)f in d&&b.call(c,d[f],f,a)},ca=C.filter?function(a,b,c){B(null!=a.length);return C.filter.call(a,b,
-c)}:function(a,b,c){for(var e=a.length,d=[],f=0,g=s(a)?a.split(""):a,h=0;h<e;h++)if(h in g){var l=g[h];b.call(c,l,h,a)&&(d[f++]=l)}return d},da=function(a,b){var c=D(a,b),e;if(e=0<=c)B(null!=a.length),C.splice.call(a,c,1);return e},ea=function(a){var b=a.length;if(0<b){for(var c=Array(b),e=0;e<b;e++)c[e]=a[e];return c}return[]},fa=function(a,b,c){B(null!=a.length);return 2>=arguments.length?C.slice.call(a,b):C.slice.call(a,b,c)};var E,F,G,H,ga=function(){return p.navigator?p.navigator.userAgent:null};H=G=F=E=!1;var I;if(I=ga()){var ha=p.navigator;E=0==I.indexOf("Opera");F=!E&&-1!=I.indexOf("MSIE");G=!E&&-1!=I.indexOf("WebKit");H=!E&&!G&&"Gecko"==ha.product}var ia=E,J=F,K=H,L=G,ja=function(){var a=p.document;return a?a.documentMode:void 0},M;
-n:{var N="",O;if(ia&&p.opera)var P=p.opera.version,N="function"==typeof P?P():P;else if(K?O=/rv\:([^\);]+)(\)|;)/:J?O=/MSIE\s+([^\);]+)(\)|;)/:L&&(O=/WebKit\/(\S+)/),O)var ka=O.exec(ga()),N=ka?ka[1]:"";if(J){var la=ja();if(la>parseFloat(N)){M=String(la);break n}}M=N}
-var ma=M,na={},Q=function(a){var b;if(!(b=na[a])){b=0;for(var c=String(ma).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),e=String(a).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),d=Math.max(c.length,e.length),f=0;0==b&&f<d;f++){var g=c[f]||"",h=e[f]||"",l=RegExp("(\\d*)(\\D*)","g"),m=RegExp("(\\d*)(\\D*)","g");do{var k=l.exec(g)||["","",""],q=m.exec(h)||["","",""];if(0==k[0].length&&0==q[0].length)break;b=((0==k[1].length?0:parseInt(k[1],10))<(0==q[1].length?0:parseInt(q[1],10))?-1:(0==k[1].length?
-0:parseInt(k[1],10))>(0==q[1].length?0:parseInt(q[1],10))?1:0)||((0==k[2].length)<(0==q[2].length)?-1:(0==k[2].length)>(0==q[2].length)?1:0)||(k[2]<q[2]?-1:k[2]>q[2]?1:0)}while(0==b)}b=na[a]=0<=b}return b},oa=p.document,pa=oa&&J?ja()||("CSS1Compat"==oa.compatMode?parseInt(ma,10):5):void 0;!K&&!J||J&&J&&9<=pa||K&&Q("1.9.1");J&&Q("9");var qa=function(a){a=a.className;return s(a)&&a.match(/\S+/g)||[]},ra=function(a,b){for(var c=qa(a),e=fa(arguments,1),d=c,f=0;f<e.length;f++)0<=D(d,e[f])||d.push(e[f]);c=c.join(" ");a.className=c},ta=function(a,b){var c=qa(a),e=fa(arguments,1),c=sa(c,e).join(" ");a.className=c},sa=function(a,b){return ca(a,function(a){return!(0<=D(b,a))})};var R=function(a,b,c){var e=document;c=c||e;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var e={},d=0,f=0,g;g=c[f];f++)a==g.nodeName&&(e[d++]=g);e.length=d;return e}return c}c=c.getElementsByTagName(a||"*");if(b){e={};for(f=d=0;g=c[f];f++)a=g.className,"function"==typeof a.split&&0<=D(a.split(/\s+/),b)&&(e[d++]=g);e.length=d;return e}return c};var S=function(a){S[" "](a);return a};S[" "]=function(){};var ua=!J||J&&9<=pa,va=J&&!Q("9");!L||Q("528");K&&Q("1.9b")||J&&Q("8")||ia&&Q("9.5")||L&&Q("528");K&&!Q("8")||J&&Q("9");var T=function(a,b){this.type=a;this.currentTarget=this.target=b};T.prototype.i=!1;T.prototype.defaultPrevented=!1;T.prototype.preventDefault=function(){this.defaultPrevented=!0};var U=function(a,b){a&&wa(this,a,b)};y(U,T);n=U.prototype;n.target=null;n.relatedTarget=null;n.offsetX=0;n.offsetY=0;n.clientX=0;n.clientY=0;n.screenX=0;n.screenY=0;n.button=0;n.keyCode=0;n.charCode=0;n.ctrlKey=!1;n.altKey=!1;n.shiftKey=!1;n.metaKey=!1;n.l=null;
-var wa=function(a,b,c){var e=a.type=b.type;T.call(a,e);a.target=b.target||b.srcElement;a.currentTarget=c;if(c=b.relatedTarget){if(K){var d;n:{try{S(c.nodeName);d=!0;break n}catch(f){}d=!1}d||(c=null)}}else"mouseover"==e?c=b.fromElement:"mouseout"==e&&(c=b.toElement);a.relatedTarget=c;a.offsetX=L||void 0!==b.offsetX?b.offsetX:b.layerX;a.offsetY=L||void 0!==b.offsetY?b.offsetY:b.layerY;a.clientX=void 0!==b.clientX?b.clientX:b.pageX;a.clientY=void 0!==b.clientY?b.clientY:b.pageY;a.screenX=b.screenX||
-0;a.screenY=b.screenY||0;a.button=b.button;a.keyCode=b.keyCode||0;a.charCode=b.charCode||("keypress"==e?b.keyCode:0);a.ctrlKey=b.ctrlKey;a.altKey=b.altKey;a.shiftKey=b.shiftKey;a.metaKey=b.metaKey;a.state=b.state;a.l=b;b.defaultPrevented&&a.preventDefault();delete a.i};U.prototype.preventDefault=function(){U.m.preventDefault.call(this);var a=this.l;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,va)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var xa="closure_listenable_"+(1E6*Math.random()|0),ya=0;var za=function(a,b,c,e,d,f){this.d=a;this.k=b;this.src=c;this.type=e;this.capture=!!d;this.e=f;this.key=++ya;this.c=this.h=!1};var V={},W={},X={},Y={},Ba=function(){var a=Aa,b=ua?function(c){return a.call(b.src,b.d,c)}:function(c){c=a.call(b.src,b.d,c);if(!c)return c};return b},Ca=function(a,b,c,e,d){if("array"==r(b))for(var f=0;f<b.length;f++)Ca(a,b[f],c,e,d);else if(c=Da(c),a&&a[xa]){f=c;B(a.f,"Event target is not initialized. Did you call superclass (goog.events.EventTarget) constructor?");c=a.f[b]||(a.f[b]=[]);var g;t:{for(g=0;g<c.length;++g){var h=c[g];if(h.d==f&&h.capture==!!e&&h.e==d)break t}g=-1}-1<g||(a=new za(f,
-null,a,b,!!e,d),a.h=!0,c.push(a))}else n:{if(!b)throw Error("Invalid event type");e=!!e;h=W;b in h||(h[b]={a:0,b:0});h=h[b];e in h||(h[e]={a:0,b:0},h.a++);h=h[e];f=a[t]||(a[t]=++u);h.b++;if(h[f]){g=h[f];for(var l=0;l<g.length;l++)if(h=g[l],h.d==c&&h.e==d){if(h.c)break;break n}}else g=h[f]=[],h.a++;l=Ba();h=new za(c,l,a,b,e,d);h.h=!0;l.src=a;l.d=h;g.push(h);X[f]||(X[f]=[]);X[f].push(h);a.addEventListener?a.addEventListener(b,l,e):a.attachEvent(b in Y?Y[b]:Y[b]="on"+b,l);V[h.key]=h}},Ea=function(a,
-b,c,e){if(!e.g&&e.j){for(var d=0,f=0;d<e.length;d++)e[d].c||(d!=f&&(e[f]=e[d]),f++);e.length=f;e.j=!1;0==f&&(delete W[a][b][c],W[a][b].a--,0==W[a][b].a&&(delete W[a][b],W[a].a--),0==W[a].a&&delete W[a])}},Fa=function(a,b,c,e,d){var f=1;b=b[t]||(b[t]=++u);if(a[b]){var g=--a.b,h=a[b];h.g?h.g++:h.g=1;try{for(var l=h.length,m=0;m<l;m++){var k=h[m];k&&!k.c&&(f&=!1!==Z(k,d))}}finally{a.b=Math.max(g,a.b),h.g--,Ea(c,e,b,h)}}return Boolean(f)},Z=function(a,b){var c=a.d,e=a.e||a.src;if(a.h&&"number"!=typeof a&&
-a&&!a.c){var d=a.src;if(d&&d[xa]){var f=a.type;f in d.f&&da(d.f[f],a)&&(delete V[a.key],a.c=!0)}else{var f=a.type,g=a.k,h=a.capture;d.removeEventListener?d.removeEventListener(f,g,h):d.detachEvent&&d.detachEvent(f in Y?Y[f]:Y[f]="on"+f,g);d=d[t]||(d[t]=++u);X[d]&&(g=X[d],da(g,a),0==g.length&&delete X[d]);a.c=!0;a.d=null;a.k=null;a.src=null;a.e=null;if(g=W[f][h][d])g.j=!0,Ea(f,h,d,g);delete V[a.key]}}return c.call(e,b)},Aa=function(a,b){if(a.c)return!0;var c=a.type,e=W;if(!(c in e))return!0;var e=
-e[c],d,f;if(!ua){var g;if(!(g=b))n:{g=["window","event"];for(var h=p;d=g.shift();)if(null!=h[d])h=h[d];else{g=null;break n}g=h}d=g;g=!0 in e;h=!1 in e;if(g){if(0>d.keyCode||void 0!=d.returnValue)return!0;n:{var l=!1;if(0==d.keyCode)try{d.keyCode=-1;break n}catch(m){l=!0}if(l||void 0==d.returnValue)d.returnValue=!0}}l=new U;wa(l,d,this);d=!0;try{if(g){for(var k=[],q=l.currentTarget;q;q=q.parentNode)k.push(q);f=e[!0];f.b=f.a;for(var v=k.length-1;!l.i&&0<=v&&f.b;v--)l.currentTarget=k[v],d&=Fa(f,k[v],
-c,!0,l);if(h)for(f=e[!1],f.b=f.a,v=0;!l.i&&v<k.length&&f.b;v++)l.currentTarget=k[v],d&=Fa(f,k[v],c,!1,l)}else d=Z(a,l)}finally{k&&(k.length=0)}return d}c=new U(b,this);return d=Z(a,c)},Ga="__closure_events_fn_"+(1E9*Math.random()>>>0),Da=function(a){B(a,"Listener can not be null.");if("function"==r(a))return a;B(a.handleEvent,"An object listener must have handleEvent method.");return a[Ga]||(a[Ga]=function(b){return a.handleEvent(b)})};var $=function(){};$.n=function(){$.o||($.o=new $)};$.n();J||L&&Q("525");x("ae.init",function(){Ha();Ia();Ca(window,"load",function(){});Ja()});
-var Ha=function(){var a;if(a=s("ae-content")?document.getElementById("ae-content"):"ae-content"){a=R("table","ae-table-striped",a);for(var b=0,c;c=a[b];b++){c=R("tbody",null,c);for(var e=0,d;d=c[e];e++){d=R("tr",null,d);for(var f=0,g;g=d[f];f++)f%2&&ra(g,"ae-even")}}}},Ia=function(){var a=R(null,"ae-noscript",void 0);ba(ea(a),function(a){ta(a,"ae-noscript")})},Ja=function(){p._gaq=p._gaq||[];p._gaq.push(function(){p._gaq._createAsyncTracker("UA-3739047-3","ae")._trackPageview()});(function(){var a=
-document.createElement("script");a.src=("https:"==document.location.protocol?"https://ssl":"http://www")+".google-analytics.com/ga.js";a.setAttribute("async","true");document.documentElement.firstChild.appendChild(a)})()};x("ae.trackPageView",function(){p._gaq&&p._gaq._getAsyncTracker("ae")._trackPageview()});var La=function(a){if(void 0==a||null==a||0==a.length)return 0;a=Math.max.apply(Math,a);return Ka(a)},Ka=function(a){var b=5;2>b&&(b=2);b-=1;return Math.ceil(a/b)*b},Ma=function(a,b,c){a=a.getSelection();1==a.length&&(a=a[0],null!=a.row&&(null!=b.starttime&&(c+="&starttime="+b.starttime),null!=b.endtime&&(c+="&endtime="+b.endtime),null!=b.latency_lower&&(c+="&latency_lower="+b.latency_lower),null!=b.latency_upper&&(c+="&latency_upper="+b.latency_upper),b=c+"&detail="+a.row,window.location.href=b))},
-Na=function(a,b,c,e,d){var f=new google.visualization.DataTable;f.addColumn("string","");f.addColumn("number","");f.addColumn({type:"string",role:"tooltip"});for(var g=0;g<b.length;g++)f.addRow(["",b[g],c[g]]);c=Math.max(10*b.length,200);b=La(b);a=new google.visualization.ColumnChart(document.getElementById("rpctime-"+a));a.draw(f,{height:100,width:c,legend:"none",chartArea:{left:40},fontSize:11,vAxis:{minValue:0,maxValue:b,gridlines:{count:5}}});google.visualization.events.addListener(a,"select",
-w(Ma,a,e,d))};x("ae.Charts.latencyHistogram",function(a,b,c){var e=new google.visualization.DataTable;e.addColumn("string","");e.addColumn("number","");for(var d=0;d<b.length;d++)e.addRow([""+a[d],b[d]]);for(d=b.length;d<a.length;d++)e.addRow([""+a[d],0]);b=La(b);(new google.visualization.ColumnChart(document.getElementById("latency-"+c))).draw(e,{legend:"none",width:20*a.length,height:200,vAxis:{maxValue:b,gridlines:{count:5}}})});
-x("ae.Charts.latencyTimestampScatter",function(a,b,c,e,d){var f=new google.visualization.DataTable;f.addColumn("number","Time (seconds from start)");f.addColumn("number","Latency");for(var g=0;g<a.length;g++){var h=Math.round(a[g]-c);f.addRow([h,b[g]])}a=e.starttime?e.starttime:0;b=new google.visualization.ScatterChart(document.getElementById("LatencyVsTimestamp"));b.draw(f,{hAxis:{title:"Time (seconds from start of recording)",minValue:a},vAxis:{title:"Request Latency (milliseconds)",minValue:0},
-tooltip:{trigger:"none"},legend:"none"});google.visualization.events.addListener(b,"select",w(Ma,b,e,d))});
-x("ae.Charts.entityCountBarChart",function(a,b,c,e){var d=new google.visualization.DataTable;d.addColumn("string","");d.addColumn("number","Reads");d.addColumn({type:"string",role:"tooltip"});d.addColumn("number","Misses");d.addColumn({type:"string",role:"tooltip"});d.addColumn("number","Writes");d.addColumn({type:"string",role:"tooltip"});var f=50;f>b.length&&(f=b.length);for(var g=0;g<f;g++)d.addRow(["",b[g][1]-b[g][3],b[g][0],b[g][3],b[g][0],b[g][2],b[g][0]]);b=20*f;f=b+130;a=new google.visualization.ColumnChart(document.getElementById(e+
-"-"+a));c=Ka(c);a.draw(d,{height:100,width:f,chartArea:{width:b},fontSize:10,isStacked:!0,vAxis:{minValue:0,maxValue:c,gridlines:{count:5}}})});
-x("ae.Charts.rpcVariationCandlestick",function(a){var b=new google.visualization.DataTable;b.addColumn("string","");b.addColumn("number","");b.addColumn("number","");b.addColumn("number","");b.addColumn("number","");b.addRows(a);(new google.visualization.CandlestickChart(document.getElementById("rpcvariation"))).draw(b,{vAxis:{title:"RPC Latency variation (milliseconds)"},hAxis:{textPosition:"out",slantedText:!0,slantedTextAngle:45,textStyle:{fontSize:13}},height:250,chartArea:{top:10,height:100},
-legend:"none",tooltip:{trigger:"none"}})});x("ae.Charts.totalTimeBarChart",function(a,b,c,e){for(var d=[],f=0;f<b.length;f++)d[f]=b[f]+" milliseconds";Na(a,b,d,c,e)});x("ae.Charts.rpcTimeBarChart",function(a,b,c,e,d){var f=[],g=[],h=c.indices,l=c.times;c=c.stats;for(var m=0;m<b;m++)f[m]=0,g[m]=null;for(m=0;m<h.length;m++){f[h[m]]=l[m];b=c[m];var k="Calls: "+b[0];if(0<b[1]||0<b[2]||0<b[3])k+=" Entities";0<b[1]&&(k+=" R:"+b[1]);0<b[2]&&(k+=" W:"+b[2]);0<b[3]&&(k+=" M:"+b[3]);g[h[m]]=k}Na(a,f,g,e,d)});})();
+/* Copyright 2008-9 Google Inc. All Rights Reserved. */ (function(){var n,q=this,r=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";
+else if("function"==b&&"undefined"==typeof a.call)return"object";return b},s=function(a){return"string"==typeof a},t="closure_uid_"+(1E9*Math.random()>>>0),u=0,v=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=Array.prototype.slice.call(arguments);b.unshift.apply(b,c);return a.apply(this,b)}},w=function(a,b){var c=a.split("."),e=q;c[0]in e||!e.execScript||e.execScript("var "+c[0]);for(var d;c.length&&(d=c.shift());)c.length||void 0===b?e=e[d]?e[d]:e[d]={}:e[d]=
+b},x=function(a,b){function c(){}c.prototype=b.prototype;a.k=b.prototype;a.prototype=new c};var y=function(a){Error.captureStackTrace?Error.captureStackTrace(this,y):this.stack=Error().stack||"";a&&(this.message=String(a))};x(y,Error);var aa=function(a,b){for(var c=a.split("%s"),e="",d=Array.prototype.slice.call(arguments,1);d.length&&1<c.length;)e+=c.shift()+d.shift();return e+c.join("%s")};var z=function(a,b){b.unshift(a);y.call(this,aa.apply(null,b));b.shift()};x(z,y);var A=function(a,b,c){if(!a){var e=Array.prototype.slice.call(arguments,2),d="Assertion failed";if(b)var d=d+(": "+b),f=e;throw new z(""+d,f||[]);}};var B=Array.prototype,C=B.indexOf?function(a,b,c){A(null!=a.length);return B.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(s(a))return s(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},ba=B.forEach?function(a,b,c){A(null!=a.length);B.forEach.call(a,b,c)}:function(a,b,c){for(var e=a.length,d=s(a)?a.split(""):a,f=0;f<e;f++)f in d&&b.call(c,d[f],f,a)},ca=B.filter?function(a,b,c){A(null!=a.length);return B.filter.call(a,b,
+c)}:function(a,b,c){for(var e=a.length,d=[],f=0,g=s(a)?a.split(""):a,h=0;h<e;h++)if(h in g){var p=g[h];b.call(c,p,h,a)&&(d[f++]=p)}return d},D=function(a,b){var c=C(a,b),e;if(e=0<=c)A(null!=a.length),B.splice.call(a,c,1);return e},E=function(a){var b=a.length;if(0<b){for(var c=Array(b),e=0;e<b;e++)c[e]=a[e];return c}return[]},da=function(a,b,c){A(null!=a.length);return 2>=arguments.length?B.slice.call(a,b):B.slice.call(a,b,c)};var F,G,H,I,ea=function(){return q.navigator?q.navigator.userAgent:null};I=H=G=F=!1;var J;if(J=ea()){var fa=q.navigator;F=0==J.lastIndexOf("Opera",0);G=!F&&(-1!=J.indexOf("MSIE")||-1!=J.indexOf("Trident"));H=!F&&-1!=J.indexOf("WebKit");I=!F&&!H&&!G&&"Gecko"==fa.product}var ga=F,K=G,L=I,M=H,ha=function(){var a=q.document;return a?a.documentMode:void 0},N;
+n:{var O="",P;if(ga&&q.opera)var Q=q.opera.version,O="function"==typeof Q?Q():Q;else if(L?P=/rv\:([^\);]+)(\)|;)/:K?P=/\b(?:MSIE|rv)\s+([^\);]+)(\)|;)/:M&&(P=/WebKit\/(\S+)/),P)var ia=P.exec(ea()),O=ia?ia[1]:"";if(K){var ja=ha();if(ja>parseFloat(O)){N=String(ja);break n}}N=O}
+var ka=N,la={},R=function(a){var b;if(!(b=la[a])){b=0;for(var c=String(ka).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),e=String(a).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),d=Math.max(c.length,e.length),f=0;0==b&&f<d;f++){var g=c[f]||"",h=e[f]||"",p=RegExp("(\\d*)(\\D*)","g"),l=RegExp("(\\d*)(\\D*)","g");do{var k=p.exec(g)||["","",""],m=l.exec(h)||["","",""];if(0==k[0].length&&0==m[0].length)break;b=((0==k[1].length?0:parseInt(k[1],10))<(0==m[1].length?0:parseInt(m[1],10))?-1:(0==k[1].length?
+0:parseInt(k[1],10))>(0==m[1].length?0:parseInt(m[1],10))?1:0)||((0==k[2].length)<(0==m[2].length)?-1:(0==k[2].length)>(0==m[2].length)?1:0)||(k[2]<m[2]?-1:k[2]>m[2]?1:0)}while(0==b)}b=la[a]=0<=b}return b},ma=q.document,na=ma&&K?ha()||("CSS1Compat"==ma.compatMode?parseInt(ka,10):5):void 0;!L&&!K||K&&K&&9<=na||L&&R("1.9.1");K&&R("9");var oa=function(a){a=a.className;return s(a)&&a.match(/\S+/g)||[]},pa=function(a,b){for(var c=oa(a),e=da(arguments,1),d=c,f=0;f<e.length;f++)0<=C(d,e[f])||d.push(e[f]);c=c.join(" ");a.className=c},ra=function(a,b){var c=oa(a),e=da(arguments,1),c=qa(c,e).join(" ");a.className=c},qa=function(a,b){return ca(a,function(a){return!(0<=C(b,a))})};var S=function(a,b,c){var e=document;c=c||e;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var e={},d=0,f=0,g;g=c[f];f++)a==g.nodeName&&(e[d++]=g);e.length=d;return e}return c}c=c.getElementsByTagName(a||"*");if(b){e={};for(f=d=0;g=c[f];f++)a=g.className,"function"==typeof a.split&&0<=C(a.split(/\s+/),b)&&(e[d++]=g);e.length=d;return e}return c};var T=function(a){T[" "](a);return a};T[" "]=function(){};var sa=!K||K&&9<=na,ta=K&&!R("9");!M||R("528");L&&R("1.9b")||K&&R("8")||ga&&R("9.5")||M&&R("528");L&&!R("8")||K&&R("9");var U=function(a,b){this.type=a;this.currentTarget=this.target=b};U.prototype.f=!1;U.prototype.defaultPrevented=!1;U.prototype.preventDefault=function(){this.defaultPrevented=!0};var V=function(a,b){if(a){var c=this.type=a.type;U.call(this,c);this.target=a.target||a.srcElement;this.currentTarget=b;var e=a.relatedTarget;if(e){if(L){var d;n:{try{T(e.nodeName);d=!0;break n}catch(f){}d=!1}d||(e=null)}}else"mouseover"==c?e=a.fromElement:"mouseout"==c&&(e=a.toElement);this.relatedTarget=e;this.offsetX=M||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=M||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:
+a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||("keypress"==c?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.state=a.state;this.i=a;a.defaultPrevented&&this.preventDefault();delete this.f}};x(V,U);n=V.prototype;n.target=null;n.relatedTarget=null;n.offsetX=0;n.offsetY=0;n.clientX=0;n.clientY=0;n.screenX=0;n.screenY=0;n.button=0;n.keyCode=0;
+n.charCode=0;n.ctrlKey=!1;n.altKey=!1;n.shiftKey=!1;n.metaKey=!1;n.i=null;n.preventDefault=function(){V.k.preventDefault.call(this);var a=this.i;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,ta)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var ua="closure_listenable_"+(1E6*Math.random()|0),va=0;var wa=function(a,b,c,e,d,f){this.b=a;this.h=b;this.src=c;this.type=e;this.capture=!!d;this.d=f;this.key=++va;this.c=this.g=!1},xa=function(a){a.c=!0;a.b=null;a.h=null;a.src=null;a.d=null};var ya={},W={},X={},Y={},Aa=function(){var a=za,b=sa?function(c){return a.call(b.src,b.b,c)}:function(c){c=a.call(b.src,b.b,c);if(!c)return c};return b},Ba=function(a,b,c,e,d){if("array"==r(b))for(var f=0;f<b.length;f++)Ba(a,b[f],c,e,d);else if(c=Ca(c),a&&a[ua])a.j.add(b,c,!0,e,d);else n:{if(!b)throw Error("Invalid event type");e=!!e;var g=W;b in g||(g[b]={a:0});g=g[b];e in g||(g[e]={a:0},g.a++);var g=g[e],f=a[t]||(a[t]=++u),h;if(g[f]){h=g[f];for(var p=0;p<h.length;p++)if(g=h[p],g.b==c&&g.d==d){if(g.c)break;
+break n}}else h=g[f]=[],g.a++;p=Aa();g=new wa(c,p,a,b,e,d);g.g=!0;p.src=a;p.b=g;h.push(g);X[f]||(X[f]=[]);X[f].push(g);a.addEventListener?a.addEventListener(b,p,e):a.attachEvent(b in Y?Y[b]:Y[b]="on"+b,p);ya[g.key]=g}},Da=function(a,b,c){var e=1;b=b[t]||(b[t]=++u);if(a[b])for(a=E(a[b]),b=0;b<a.length;b++){var d=a[b];d&&!d.c&&(e&=!1!==Z(d,c))}return Boolean(e)},Z=function(a,b){var c=a.b,e=a.d||a.src;if(a.g&&"number"!=typeof a&&a&&!a.c){var d=a.src;if(d&&d[ua]){var f=d.j,g=a.type;g in f.e&&D(f.e[g],
+a)&&(xa(a),0==f.e[g].length&&delete f.e[g])}else{var f=a.type,h=a.h,g=a.capture;d.removeEventListener?d.removeEventListener(f,h,g):d.detachEvent&&d.detachEvent(f in Y?Y[f]:Y[f]="on"+f,h);d=d[t]||(d[t]=++u);X[d]&&(h=X[d],D(h,a),0==h.length&&delete X[d]);xa(a);if(h=W[f][g][d])D(h,a),0==h.length&&(delete W[f][g][d],W[f][g].a--),0==W[f][g].a&&(delete W[f][g],W[f].a--),0==W[f].a&&delete W[f];delete ya[a.key]}}return c.call(e,b)},za=function(a,b){if(a.c)return!0;var c=a.type,e=W;if(!(c in e))return!0;var e=
+e[c],d,f;if(!sa){if(!(c=b))n:{for(var c=["window","event"],g=q;d=c.shift();)if(null!=g[d])g=g[d];else{c=null;break n}c=g}d=c;c=!0 in e;g=!1 in e;if(c){if(0>d.keyCode||void 0!=d.returnValue)return!0;n:{var h=!1;if(0==d.keyCode)try{d.keyCode=-1;break n}catch(p){h=!0}if(h||void 0==d.returnValue)d.returnValue=!0}}h=new V(d,this);d=!0;try{if(c){for(var l=[],k=h.currentTarget;k;k=k.parentNode)l.push(k);f=e[!0];for(var m=l.length-1;!h.f&&0<=m;m--)h.currentTarget=l[m],d&=Da(f,l[m],h);if(g)for(f=e[!1],m=0;!h.f&&
+m<l.length;m++)h.currentTarget=l[m],d&=Da(f,l[m],h)}else d=Z(a,h)}finally{l&&(l.length=0)}return d}return Z(a,new V(b,this))},Ea="__closure_events_fn_"+(1E9*Math.random()>>>0),Ca=function(a){A(a,"Listener can not be null.");if("function"==r(a))return a;A(a.handleEvent,"An object listener must have handleEvent method.");return a[Ea]||(a[Ea]=function(b){return a.handleEvent(b)})};var $=function(){};$.l=function(){$.m||($.m=new $)};$.l();K||M&&R("525");w("ae.init",function(){Fa();Ga();Ba(window,"load",function(){});Ha()});
+var Fa=function(){var a;if(a=s("ae-content")?document.getElementById("ae-content"):"ae-content"){a=S("table","ae-table-striped",a);for(var b=0,c;c=a[b];b++){c=S("tbody",null,c);for(var e=0,d;d=c[e];e++){d=S("tr",null,d);for(var f=0,g;g=d[f];f++)f%2&&pa(g,"ae-even")}}}},Ga=function(){var a=S(null,"ae-noscript",void 0);ba(E(a),function(a){ra(a,"ae-noscript")})},Ha=function(){q._gaq=q._gaq||[];q._gaq.push(function(){q._gaq._createAsyncTracker("UA-3739047-3","ae")._trackPageview()});(function(){var a=
+document.createElement("script");a.src=("https:"==document.location.protocol?"https://ssl":"http://www")+".google-analytics.com/ga.js";a.setAttribute("async","true");document.documentElement.firstChild.appendChild(a)})()};w("ae.trackPageView",function(){q._gaq&&q._gaq._getAsyncTracker("ae")._trackPageview()});var Ja=function(a){if(void 0==a||null==a||0==a.length)return 0;a=Math.max.apply(Math,a);return Ia(a)},Ia=function(a){var b=5;2>b&&(b=2);b-=1;return Math.ceil(a/b)*b},Ka=function(a,b,c){a=a.getSelection();1==a.length&&(a=a[0],null!=a.row&&(null!=b.starttime&&(c+="&starttime="+b.starttime),null!=b.endtime&&(c+="&endtime="+b.endtime),null!=b.latency_lower&&(c+="&latency_lower="+b.latency_lower),null!=b.latency_upper&&(c+="&latency_upper="+b.latency_upper),b=c+"&detail="+a.row,window.location.href=b))},
+La=function(a,b,c,e,d){var f=new google.visualization.DataTable;f.addColumn("string","");f.addColumn("number","");f.addColumn({type:"string",role:"tooltip"});for(var g=0;g<b.length;g++)f.addRow(["",b[g],c[g]]);c=Math.max(10*b.length,200);b=Ja(b);a=new google.visualization.ColumnChart(document.getElementById("rpctime-"+a));a.draw(f,{height:100,width:c,legend:"none",chartArea:{left:40},fontSize:11,vAxis:{minValue:0,maxValue:b,gridlines:{count:5}}});google.visualization.events.addListener(a,"select",
+v(Ka,a,e,d))};w("ae.Charts.latencyHistogram",function(a,b,c){var e=new google.visualization.DataTable;e.addColumn("string","");e.addColumn("number","");for(var d=0;d<b.length;d++)e.addRow([""+a[d],b[d]]);for(d=b.length;d<a.length;d++)e.addRow([""+a[d],0]);b=Ja(b);(new google.visualization.ColumnChart(document.getElementById("latency-"+c))).draw(e,{legend:"none",width:20*a.length,height:200,vAxis:{maxValue:b,gridlines:{count:5}}})});
+w("ae.Charts.latencyTimestampScatter",function(a,b,c,e,d){var f=new google.visualization.DataTable;f.addColumn("number","Time (seconds from start)");f.addColumn("number","Latency");for(var g=0;g<a.length;g++){var h=Math.round(a[g]-c);f.addRow([h,b[g]])}a=e.starttime?e.starttime:0;b=new google.visualization.ScatterChart(document.getElementById("LatencyVsTimestamp"));b.draw(f,{hAxis:{title:"Time (seconds from start of recording)",minValue:a},vAxis:{title:"Request Latency (milliseconds)",minValue:0},
+tooltip:{trigger:"none"},legend:"none"});google.visualization.events.addListener(b,"select",v(Ka,b,e,d))});
+w("ae.Charts.entityCountBarChart",function(a,b,c,e){var d=new google.visualization.DataTable;d.addColumn("string","");d.addColumn("number","Reads");d.addColumn({type:"string",role:"tooltip"});d.addColumn("number","Misses");d.addColumn({type:"string",role:"tooltip"});d.addColumn("number","Writes");d.addColumn({type:"string",role:"tooltip"});var f=50;f>b.length&&(f=b.length);for(var g=0;g<f;g++)d.addRow(["",b[g][1]-b[g][3],b[g][0],b[g][3],b[g][0],b[g][2],b[g][0]]);b=20*f;f=b+130;a=new google.visualization.ColumnChart(document.getElementById(e+
+"-"+a));c=Ia(c);a.draw(d,{height:100,width:f,chartArea:{width:b},fontSize:10,isStacked:!0,vAxis:{minValue:0,maxValue:c,gridlines:{count:5}}})});
+w("ae.Charts.rpcVariationCandlestick",function(a){var b=new google.visualization.DataTable;b.addColumn("string","");b.addColumn("number","");b.addColumn("number","");b.addColumn("number","");b.addColumn("number","");b.addRows(a);(new google.visualization.CandlestickChart(document.getElementById("rpcvariation"))).draw(b,{vAxis:{title:"RPC Latency variation (milliseconds)"},hAxis:{textPosition:"out",slantedText:!0,slantedTextAngle:45,textStyle:{fontSize:13}},height:250,chartArea:{top:10,height:100},
+legend:"none",tooltip:{trigger:"none"}})});w("ae.Charts.totalTimeBarChart",function(a,b,c,e){for(var d=[],f=0;f<b.length;f++)d[f]=b[f]+" milliseconds";La(a,b,d,c,e)});w("ae.Charts.rpcTimeBarChart",function(a,b,c,e,d){var f=[],g=[],h=c.indices,p=c.times;c=c.stats;for(var l=0;l<b;l++)f[l]=0,g[l]=null;for(l=0;l<h.length;l++){f[h[l]]=p[l];b=c[l];var k="Calls: "+b[0];if(0<b[1]||0<b[2]||0<b[3])k+=" Entities";0<b[1]&&(k+=" R:"+b[1]);0<b[2]&&(k+=" W:"+b[2]);0<b[3]&&(k+=" M:"+b[3]);g[h[l]]=k}La(a,f,g,e,d)});})();
diff --git a/google/appengine/ext/appstats/static/appstats_js.js b/google/appengine/ext/appstats/static/appstats_js.js
index 4f7de01..f749f53 100644
--- a/google/appengine/ext/appstats/static/appstats_js.js
+++ b/google/appengine/ext/appstats/static/appstats_js.js
@@ -1,84 +1,84 @@
-/* Copyright 2008-10 Google Inc. All Rights Reserved. */ (function(){var f,m=this,aa=function(){},ba=function(a){a.ea=function(){return a.Eb?a.Eb:a.Eb=new a}},ca=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=
-typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a.call)return"object";return b},da=function(a){return"array"==ca(a)},ea=function(a){var b=ca(a);return"array"==b||"object"==b&&"number"==typeof a.length},p=function(a){return"string"==typeof a},q=function(a){return"function"==ca(a)},fa=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},r=function(a){return a[ga]||
-(a[ga]=++ha)},ga="closure_uid_"+(1E9*Math.random()>>>0),ha=0,ia=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=Array.prototype.slice.call(arguments);b.unshift.apply(b,c);return a.apply(this,b)}},ja=function(a,b){var c=a.split("."),d=m;c[0]in d||!d.execScript||d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)c.length||void 0===b?d=d[e]?d[e]:d[e]={}:d[e]=b},s=function(a,b){function c(){}c.prototype=b.prototype;a.f=b.prototype;a.prototype=new c;a.prototype.constructor=
-a};var ka=function(a){Error.captureStackTrace?Error.captureStackTrace(this,ka):this.stack=Error().stack||"";a&&(this.message=String(a))};s(ka,Error);ka.prototype.name="CustomError";var la=function(a,b){for(var c=a.split("%s"),d="",e=Array.prototype.slice.call(arguments,1);e.length&&1<c.length;)d+=c.shift()+e.shift();return d+c.join("%s")},ma=function(a){return a.replace(/^[\s\xa0]+|[\s\xa0]+$/g,"")},sa=function(a){if(!na.test(a))return a;-1!=a.indexOf("&")&&(a=a.replace(oa,"&amp;"));-1!=a.indexOf("<")&&(a=a.replace(pa,"&lt;"));-1!=a.indexOf(">")&&(a=a.replace(qa,"&gt;"));-1!=a.indexOf('"')&&(a=a.replace(ra,"&quot;"));return a},oa=/&/g,pa=/</g,qa=/>/g,ra=/\"/g,na=/[&<>\"]/;var ta=function(a,b){b.unshift(a);ka.call(this,la.apply(null,b));b.shift()};s(ta,ka);ta.prototype.name="AssertionError";var ua=function(a,b,c){var d="Assertion failed";if(b)var d=d+(": "+b),e=c;else a&&(d+=": "+a,e=null);throw new ta(""+d,e||[]);},t=function(a,b,c){a||ua("",b,Array.prototype.slice.call(arguments,2))},va=function(a,b,c,d){a instanceof b||ua("instanceof check failed.",c,Array.prototype.slice.call(arguments,3))};var u=Array.prototype,wa=u.indexOf?function(a,b,c){t(null!=a.length);return u.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(p(a))return p(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},xa=u.forEach?function(a,b,c){t(null!=a.length);u.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=p(a)?a.split(""):a,g=0;g<d;g++)g in e&&b.call(c,e[g],g,a)},ya=u.filter?function(a,b,c){t(null!=a.length);return u.filter.call(a,
-b,c)}:function(a,b,c){for(var d=a.length,e=[],g=0,h=p(a)?a.split(""):a,k=0;k<d;k++)if(k in h){var l=h[k];b.call(c,l,k,a)&&(e[g++]=l)}return e},za=u.every?function(a,b,c){t(null!=a.length);return u.every.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=p(a)?a.split(""):a,g=0;g<d;g++)if(g in e&&!b.call(c,e[g],g,a))return!1;return!0},w=function(a,b){return 0<=wa(a,b)},Ba=function(a,b){var c=wa(a,b),d;(d=0<=c)&&Aa(a,c);return d},Aa=function(a,b){t(null!=a.length);return 1==u.splice.call(a,b,1).length},
-Ca=function(a){var b=a.length;if(0<b){for(var c=Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},Ea=function(a,b,c,d){t(null!=a.length);u.splice.apply(a,Da(arguments,1))},Da=function(a,b,c){t(null!=a.length);return 2>=arguments.length?u.slice.call(a,b):u.slice.call(a,b,c)};var Fa=function(a,b){for(var c in a)b.call(void 0,a[c],c,a)},Ga=function(a,b){for(var c in a)if(a[c]==b)return!0;return!1},Ha=function(a,b,c){if(b in a)throw Error('The object already contains the key "'+b+'"');a[b]=c},Ia=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Ja="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ka=function(a,b){for(var c,d,e=1;e<arguments.length;e++){d=arguments[e];for(c in d)a[c]=d[c];for(var g=0;g<Ja.length;g++)c=
-Ja[g],Object.prototype.hasOwnProperty.call(d,c)&&(a[c]=d[c])}};var La,Ma,Na,Oa,Pa=function(){return m.navigator?m.navigator.userAgent:null};Oa=Na=Ma=La=!1;var Qa;if(Qa=Pa()){var Ra=m.navigator;La=0==Qa.indexOf("Opera");Ma=!La&&-1!=Qa.indexOf("MSIE");Na=!La&&-1!=Qa.indexOf("WebKit");Oa=!La&&!Na&&"Gecko"==Ra.product}var Sa=La,x=Ma,y=Oa,z=Na,Ta=m.navigator,A=-1!=(Ta&&Ta.platform||"").indexOf("Mac"),Ua=function(){var a=m.document;return a?a.documentMode:void 0},Va;
-t:{var Wa="",Xa;if(Sa&&m.opera)var Ya=m.opera.version,Wa="function"==typeof Ya?Ya():Ya;else if(y?Xa=/rv\:([^\);]+)(\)|;)/:x?Xa=/MSIE\s+([^\);]+)(\)|;)/:z&&(Xa=/WebKit\/(\S+)/),Xa)var Za=Xa.exec(Pa()),Wa=Za?Za[1]:"";if(x){var $a=Ua();if($a>parseFloat(Wa)){Va=String($a);break t}}Va=Wa}
-var ab=Va,bb={},B=function(a){var b;if(!(b=bb[a])){b=0;for(var c=ma(String(ab)).split("."),d=ma(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g<e;g++){var h=c[g]||"",k=d[g]||"",l=RegExp("(\\d*)(\\D*)","g"),H=RegExp("(\\d*)(\\D*)","g");do{var n=l.exec(h)||["","",""],v=H.exec(k)||["","",""];if(0==n[0].length&&0==v[0].length)break;b=((0==n[1].length?0:parseInt(n[1],10))<(0==v[1].length?0:parseInt(v[1],10))?-1:(0==n[1].length?0:parseInt(n[1],10))>(0==v[1].length?0:parseInt(v[1],10))?1:
-0)||((0==n[2].length)<(0==v[2].length)?-1:(0==n[2].length)>(0==v[2].length)?1:0)||(n[2]<v[2]?-1:n[2]>v[2]?1:0)}while(0==b)}b=bb[a]=0<=b}return b},cb=m.document,db=cb&&x?Ua()||("CSS1Compat"==cb.compatMode?parseInt(ab,10):5):void 0;var eb,fb=!x||x&&9<=db;!y&&!x||x&&x&&9<=db||y&&B("1.9.1");var gb=x&&!B("9");var hb=function(a){a=a.className;return p(a)&&a.match(/\S+/g)||[]},C=function(a,b){for(var c=hb(a),d=Da(arguments,1),e=c.length+d.length,g=c,h=0;h<d.length;h++)w(g,d[h])||g.push(d[h]);a.className=c.join(" ");return c.length==e},jb=function(a,b){var c=hb(a),d=Da(arguments,1),e=ib(c,d);a.className=e.join(" ");return e.length==c.length-d.length},ib=function(a,b){return ya(a,function(a){return!w(b,a)})};var mb=function(a){return a?new kb(lb(a)):eb||(eb=new kb)},nb=function(a){return p(a)?document.getElementById(a):a},ob=function(a,b,c){var d=document;c=c||d;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var d={},e=0,g=0,h;h=c[g];g++)a==h.nodeName&&(d[e++]=h);d.length=e;return d}return c}c=c.getElementsByTagName(a||"*");if(b){d={};for(g=e=0;h=c[g];g++)a=
-h.className,"function"==typeof a.split&&w(a.split(/\s+/),b)&&(d[e++]=h);d.length=e;return d}return c},qb=function(a,b){Fa(b,function(b,d){"style"==d?a.style.cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in pb?a.setAttribute(pb[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},pb={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",
-usemap:"useMap",valign:"vAlign",width:"width"},sb=function(a,b,c){return rb(document,arguments)},rb=function(a,b){var c=b[0],d=b[1];if(!fb&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',sa(d.name),'"');if(d.type){c.push(' type="',sa(d.type),'"');var e={};Ka(e,d);delete e.type;d=e}c.push(">");c=c.join("")}c=a.createElement(c);d&&(p(d)?c.className=d:da(d)?C.apply(null,[c].concat(d)):qb(c,d));2<b.length&&tb(a,c,b);return c},tb=function(a,b,c){function d(c){c&&b.appendChild(p(c)?a.createTextNode(c):
-c)}for(var e=2;e<c.length;e++){var g=c[e];if(!ea(g)||fa(g)&&0<g.nodeType)d(g);else{var h;t:{if(g&&"number"==typeof g.length){if(fa(g)){h="function"==typeof g.item||"string"==typeof g.item;break t}if(q(g)){h="function"==typeof g.item;break t}}h=!1}xa(h?Ca(g):g,d)}}},ub=function(a,b){if(a.contains&&1==b.nodeType)return a==b||a.contains(b);if("undefined"!=typeof a.compareDocumentPosition)return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b.parentNode;return b==a},lb=function(a){return 9==
-a.nodeType?a:a.ownerDocument||a.document},vb=function(a,b){if("textContent"in a)a.textContent=b;else if(a.firstChild&&3==a.firstChild.nodeType){for(;a.lastChild!=a.firstChild;)a.removeChild(a.lastChild);a.firstChild.data=b}else{for(var c;c=a.firstChild;)a.removeChild(c);a.appendChild(lb(a).createTextNode(String(b)))}},wb={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},xb={IMG:" ",BR:"\n"},yb=function(a){var b=a.getAttributeNode("tabindex");return b&&b.specified?(a=a.tabIndex,"number"==typeof a&&0<=a&&
-32768>a):!1},zb=function(a,b,c){if(!(a.nodeName in wb))if(3==a.nodeType)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in xb)b.push(xb[a.nodeName]);else for(a=a.firstChild;a;)zb(a,b,c),a=a.nextSibling},kb=function(a){this.D=a||m.document||document};f=kb.prototype;f.ib=mb;f.a=function(a){return p(a)?this.D.getElementById(a):a};f.q=function(a,b,c){return rb(this.D,arguments)};f.createElement=function(a){return this.D.createElement(a)};
-f.createTextNode=function(a){return this.D.createTextNode(String(a))};f.appendChild=function(a,b){a.appendChild(b)};f.contains=ub;var Ab=function(a){Ab[" "](a);return a};Ab[" "]=aa;var Bb=!x||x&&9<=db,Cb=!x||x&&9<=db,Db=x&&!B("9");!z||B("528");y&&B("1.9b")||x&&B("8")||Sa&&B("9.5")||z&&B("528");y&&!B("8")||x&&B("9");var Eb=function(){};Eb.prototype.Sb=!1;var D=function(a,b){this.type=a;this.currentTarget=this.target=b};f=D.prototype;f.U=!1;f.defaultPrevented=!1;f.xb=!0;f.stopPropagation=function(){this.U=!0};f.preventDefault=function(){this.defaultPrevented=!0;this.xb=!1};var E=function(a,b){a&&Fb(this,a,b)};s(E,D);var Gb=[1,4,2];f=E.prototype;f.target=null;f.relatedTarget=null;f.offsetX=0;f.offsetY=0;f.clientX=0;f.clientY=0;f.screenX=0;f.screenY=0;f.button=0;f.keyCode=0;f.charCode=0;f.ctrlKey=!1;f.altKey=!1;f.shiftKey=!1;f.metaKey=!1;f.$a=!1;f.T=null;
-var Fb=function(a,b,c){var d=a.type=b.type;D.call(a,d);a.target=b.target||b.srcElement;a.currentTarget=c;if(c=b.relatedTarget){if(y){var e;t:{try{Ab(c.nodeName);e=!0;break t}catch(g){}e=!1}e||(c=null)}}else"mouseover"==d?c=b.fromElement:"mouseout"==d&&(c=b.toElement);a.relatedTarget=c;a.offsetX=z||void 0!==b.offsetX?b.offsetX:b.layerX;a.offsetY=z||void 0!==b.offsetY?b.offsetY:b.layerY;a.clientX=void 0!==b.clientX?b.clientX:b.pageX;a.clientY=void 0!==b.clientY?b.clientY:b.pageY;a.screenX=b.screenX||
-0;a.screenY=b.screenY||0;a.button=b.button;a.keyCode=b.keyCode||0;a.charCode=b.charCode||("keypress"==d?b.keyCode:0);a.ctrlKey=b.ctrlKey;a.altKey=b.altKey;a.shiftKey=b.shiftKey;a.metaKey=b.metaKey;a.$a=A?b.metaKey:b.ctrlKey;a.state=b.state;a.T=b;b.defaultPrevented&&a.preventDefault();delete a.U},Hb=function(a){return Bb?0==a.T.button:"click"==a.type?!0:!!(a.T.button&Gb[0])};
-E.prototype.stopPropagation=function(){E.f.stopPropagation.call(this);this.T.stopPropagation?this.T.stopPropagation():this.T.cancelBubble=!0};E.prototype.preventDefault=function(){E.f.preventDefault.call(this);var a=this.T;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,Db)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var Ib="closure_listenable_"+(1E6*Math.random()|0),Jb=0;var Kb=function(a,b,c,d,e,g){this.H=a;this.zb=b;this.src=c;this.type=d;this.capture=!!e;this.Z=g;this.key=++Jb;this.G=this.ja=!1};var Lb={},F={},Mb={},Nb={},G=function(a,b,c,d,e){if(da(b)){for(var g=0;g<b.length;g++)G(a,b[g],c,d,e);return null}c=Ob(c);if(a&&a[Ib])a=a.d(b,c,d,e);else t:{if(!b)throw Error("Invalid event type");d=!!d;var h=F;b in h||(h[b]={L:0,M:0});h=h[b];d in h||(h[d]={L:0,M:0},h.L++);var h=h[d],g=r(a),k;h.M++;if(h[g]){k=h[g];for(var l=0;l<k.length;l++)if(h=k[l],h.H==c&&h.Z==e){if(h.G)break;k[l].ja=!1;a=k[l];break t}}else k=h[g]=[],h.L++;l=Pb();h=new Kb(c,l,a,b,d,e);h.ja=!1;l.src=a;l.H=h;k.push(h);Mb[g]||(Mb[g]=
-[]);Mb[g].push(h);a.addEventListener?a.addEventListener(b,l,d):a.attachEvent(b in Nb?Nb[b]:Nb[b]="on"+b,l);a=Lb[h.key]=h}return a},Pb=function(){var a=Qb,b=Cb?function(c){return a.call(b.src,b.H,c)}:function(c){c=a.call(b.src,b.H,c);if(!c)return c};return b},Rb=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)Rb(a,b[g],c,d,e);else if(c=Ob(c),a&&a[Ib])a.u(b,c,d,e);else if(d=!!d,a=Sb(a,b,d))for(g=0;g<a.length;g++)if(a[g].H==c&&a[g].capture==d&&a[g].Z==e){I(a[g]);break}},I=function(a){if("number"==
-typeof a||!a||a.G)return!1;var b=a.src;if(b&&b[Ib])return Tb(b,a);var c=a.type,d=a.zb,e=a.capture;b.removeEventListener?b.removeEventListener(c,d,e):b.detachEvent&&b.detachEvent(c in Nb?Nb[c]:Nb[c]="on"+c,d);b=r(b);Mb[b]&&(d=Mb[b],Ba(d,a),0==d.length&&delete Mb[b]);a.G=!0;a.H=null;a.zb=null;a.src=null;a.Z=null;if(d=F[c][e][b])d.wb=!0,Ub(c,e,b,d);delete Lb[a.key];return!0},Ub=function(a,b,c,d){if(!d.Ka&&d.wb){for(var e=0,g=0;e<d.length;e++)d[e].G||(e!=g&&(d[g]=d[e]),g++);d.length=g;d.wb=!1;0==g&&(delete F[a][b][c],
-F[a][b].L--,0==F[a][b].L&&(delete F[a][b],F[a].L--),0==F[a].L&&delete F[a])}},Sb=function(a,b,c){var d=F;return b in d&&(d=d[b],c in d&&(d=d[c],a=r(a),d[a]))?d[a]:null},Wb=function(a,b,c,d,e){var g=1;b=r(b);if(a[b]){var h=--a.M,k=a[b];k.Ka?k.Ka++:k.Ka=1;try{for(var l=k.length,H=0;H<l;H++){var n=k[H];n&&!n.G&&(g&=!1!==Vb(n,e))}}finally{a.M=Math.max(h,a.M),k.Ka--,Ub(c,d,b,k)}}return Boolean(g)},Vb=function(a,b){var c=a.H,d=a.Z||a.src;a.ja&&I(a);return c.call(d,b)},Qb=function(a,b){if(a.G)return!0;var c=
-a.type,d=F;if(!(c in d))return!0;var d=d[c],e,g;if(!Cb){var h;if(!(h=b))t:{h=["window","event"];for(var k=m;e=h.shift();)if(null!=k[e])k=k[e];else{h=null;break t}h=k}e=h;h=!0 in d;k=!1 in d;if(h){if(0>e.keyCode||void 0!=e.returnValue)return!0;t:{var l=!1;if(0==e.keyCode)try{e.keyCode=-1;break t}catch(H){l=!0}if(l||void 0==e.returnValue)e.returnValue=!0}}l=new E;Fb(l,e,this);e=!0;try{if(h){for(var n=[],v=l.currentTarget;v;v=v.parentNode)n.push(v);g=d[!0];g.M=g.L;for(var M=n.length-1;!l.U&&0<=M&&g.M;M--)l.currentTarget=
-n[M],e&=Wb(g,n[M],c,!0,l);if(k)for(g=d[!1],g.M=g.L,M=0;!l.U&&M<n.length&&g.M;M++)l.currentTarget=n[M],e&=Wb(g,n[M],c,!1,l)}else e=Vb(a,l)}finally{n&&(n.length=0)}return e}c=new E(b,this);return e=Vb(a,c)},Xb="__closure_events_fn_"+(1E9*Math.random()>>>0),Ob=function(a){t(a,"Listener can not be null.");if(q(a))return a;t(a.handleEvent,"An object listener must have handleEvent method.");return a[Xb]||(a[Xb]=function(b){return a.handleEvent(b)})};var J=function(a){this.tb=a;this.Ha={}};s(J,Eb);var Yb=[];J.prototype.d=function(a,b,c,d,e){da(b)||(Yb[0]=b,b=Yb);for(var g=0;g<b.length;g++){var h=G(a,b[g],c||this,d||!1,e||this.tb||this);if(!h)break;this.Ha[h.key]=h}return this};
-J.prototype.u=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)this.u(a,b[g],c,d,e);else{t:if(e=e||this.tb||this,d=!!d,c=Ob(c||this),a&&a[Ib])a=a.J[b],b=-1,a&&(b=Zb(a,c,d,e)),e=-1<b?a[b]:null;else{if(a=Sb(a,b,d))for(b=0;b<a.length;b++)if(!a[b].G&&a[b].H==c&&a[b].capture==d&&a[b].Z==e){e=a[b];break t}e=null}e&&(I(e),delete this.Ha[e.key])}return this};var $b=function(a){Fa(a.Ha,I);a.Ha={}};J.prototype.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var K=function(){this.J={};this.bc=this};s(K,Eb);K.prototype[Ib]=!0;f=K.prototype;f.kb=null;f.Xa=function(a){this.kb=a};f.addEventListener=function(a,b,c,d){G(this,a,b,c,d)};f.removeEventListener=function(a,b,c,d){Rb(this,a,b,c,d)};
-f.dispatchEvent=function(a){ac(this);var b,c=this.kb;if(c){b=[];for(var d=1;c;c=c.kb)b.push(c),t(1E3>++d,"infinite loop")}c=this.bc;d=a.type||a;if(p(a))a=new D(a,c);else if(a instanceof D)a.target=a.target||c;else{var e=a;a=new D(d,c);Ka(a,e)}var e=!0,g;if(b)for(var h=b.length-1;!a.U&&0<=h;h--)g=a.currentTarget=b[h],e=bc(g,d,!0,a)&&e;a.U||(g=a.currentTarget=c,e=bc(g,d,!0,a)&&e,a.U||(e=bc(g,d,!1,a)&&e));if(b)for(h=0;!a.U&&h<b.length;h++)g=a.currentTarget=b[h],e=bc(g,d,!1,a)&&e;return e};
-var ac=function(a){t(a.J,"Event target is not initialized. Did you call superclass (goog.events.EventTarget) constructor?")};K.prototype.d=function(a,b,c,d){ac(this);var e=this.J[a]||(this.J[a]=[]),g=Zb(e,b,c,d);-1<g?(a=e[g],a.ja=!1):(a=new Kb(b,null,this,a,!!c,d),a.ja=!1,e.push(a));return a};K.prototype.u=function(a,b,c,d){if(!(a in this.J))return!1;a=this.J[a];b=Zb(a,b,c,d);return-1<b?(c=a[b],delete Lb[c.key],c.G=!0,Aa(a,b)):!1};
-var Tb=function(a,b){var c=b.type;if(!(c in a.J))return!1;if(c=Ba(a.J[c],b))delete Lb[b.key],b.G=!0;return c},bc=function(a,b,c,d){if(!(b in a.J))return!0;var e=!0;b=Ca(a.J[b]);for(var g=0;g<b.length;++g){var h=b[g];if(h&&!h.G&&h.capture==c){var k=h.H,l=h.Z||h.src;h.ja&&Tb(a,h);e=!1!==k.call(l,d)&&e}}return e&&!1!=d.xb},Zb=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(g.H==b&&g.capture==!!c&&g.Z==d)return e}return-1};var L=function(a,b){a.style.display=b?"":"none"},cc=y?"MozUserSelect":z?"WebkitUserSelect":null,dc=function(a,b,c){c=c?null:a.getElementsByTagName("*");if(cc){if(b=b?"none":"",a.style[cc]=b,c){a=0;for(var d;d=c[a];a++)d.style[cc]=b}}else if(x||Sa)if(b=b?"on":"",a.setAttribute("unselectable",b),c)for(a=0;d=c[a];a++)d.setAttribute("unselectable",b)};var ec=function(){};ba(ec);ec.prototype.ec=0;ec.ea();var N=function(a){K.call(this);this.n=a||mb();this.va=fc};s(N,K);N.prototype.dc=ec.ea();var fc=null,gc=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close"}throw Error("Invalid component state");};f=N.prototype;f.S=null;f.e=!1;f.c=null;f.va=null;f.o=null;f.r=null;f.i=null;
-var hc=function(a){return a.S||(a.S=":"+(a.dc.ec++).toString(36))},ic=function(a,b){if(a.o&&a.o.i){var c=a.o.i,d=a.S;d in c&&delete c[d];Ha(a.o.i,b,a)}a.S=b};N.prototype.a=function(){return this.c};var jc=function(a){return a.hb||(a.hb=new J(a))},kc=function(a,b){if(a==b)throw Error("Unable to set parent component");if(b&&a.o&&a.S&&a.o.i&&a.S&&(a.S in a.o.i&&a.o.i[a.S])&&a.o!=b)throw Error("Unable to set parent component");a.o=b;N.f.Xa.call(a,b)};f=N.prototype;f.getParent=function(){return this.o};
-f.Xa=function(a){if(this.o&&this.o!=a)throw Error("Method not supported");N.f.Xa.call(this,a)};f.ib=function(){return this.n};f.q=function(){this.c=this.n.createElement("div")};f.O=function(a){if(this.e)throw Error("Component already rendered");if(a&&this.da(a)){var b=lb(a);this.n&&this.n.D==b||(this.n=mb(a));this.Wa(a);this.F()}else throw Error("Invalid element to decorate");};f.da=function(){return!0};f.Wa=function(a){this.c=a};f.F=function(){this.e=!0;lc(this,function(a){!a.e&&a.a()&&a.F()})};
-f.ga=function(){lc(this,function(a){a.e&&a.ga()});this.hb&&$b(this.hb);this.e=!1};f.Da=function(a,b){this.Sa(a,mc(this),b)};
-f.Sa=function(a,b,c){if(a.e&&(c||!this.e))throw Error("Component already rendered");if(0>b||b>mc(this))throw Error("Child component index out of bounds");this.i&&this.r||(this.i={},this.r=[]);if(a.getParent()==this){var d=hc(a);this.i[d]=a;Ba(this.r,a)}else Ha(this.i,hc(a),a);kc(a,this);Ea(this.r,b,0,a);if(a.e&&this.e&&a.getParent()==this)c=this.B(),c.insertBefore(a.a(),c.childNodes[b]||null);else if(c){this.c||this.q();c=O(this,b+1);b=this.B();c=c?c.c:null;if(a.e)throw Error("Component already rendered");
-a.c||a.q();b?b.insertBefore(a.c,c||null):a.n.D.body.appendChild(a.c);a.o&&!a.o.e||a.F()}else this.e&&(!a.e&&a.c&&a.c.parentNode&&1==a.c.parentNode.nodeType)&&a.F()};f.B=function(){return this.c};
-var nc=function(a){if(null==a.va){var b;t:{b=a.e?a.c:a.n.D.body;var c=lb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(b=c.defaultView.getComputedStyle(b,null))){b=b.direction||b.getPropertyValue("direction")||"";break t}b=""}a.va="rtl"==(b||((a.e?a.c:a.n.D.body).currentStyle?(a.e?a.c:a.n.D.body).currentStyle.direction:null)||(a.e?a.c:a.n.D.body).style&&(a.e?a.c:a.n.D.body).style.direction)}return a.va};
-N.prototype.ta=function(a){if(this.e)throw Error("Component already rendered");this.va=a};var mc=function(a){return a.r?a.r.length:0},O=function(a,b){return a.r?a.r[b]||null:null},lc=function(a,b,c){a.r&&xa(a.r,b,c)},oc=function(a,b){return a.r&&b?wa(a.r,b):-1};
-N.prototype.removeChild=function(a,b){if(a){var c=p(a)?a:hc(a);a=this.i&&c?(c in this.i?this.i[c]:void 0)||null:null;if(c&&a){var d=this.i;c in d&&delete d[c];Ba(this.r,a);b&&(a.ga(),a.c&&(c=a.c)&&c.parentNode&&c.parentNode.removeChild(c));kc(a,null)}}if(!a)throw Error("Child is not in parent component");return a};var pc,qc={kc:"activedescendant",pc:"atomic",qc:"autocomplete",sc:"busy",vc:"checked",Ac:"controls",Cc:"describedby",Fc:"disabled",Hc:"dropeffect",Ic:"expanded",Jc:"flowto",Lc:"grabbed",Pc:"haspopup",Rc:"hidden",Tc:"invalid",Uc:"label",Vc:"labelledby",Wc:"level",ad:"live",ld:"multiline",md:"multiselectable",qd:"orientation",rd:"owns",sd:"posinset",ud:"pressed",yd:"readonly",Ad:"relevant",Bd:"required",Hd:"selected",Jd:"setsize",Ld:"sort",Yd:"valuemax",Zd:"valuemin",$d:"valuenow",ae:"valuetext"};var rc={lc:"alert",mc:"alertdialog",nc:"application",oc:"article",rc:"banner",tc:"button",uc:"checkbox",wc:"columnheader",xc:"combobox",yc:"complementary",zc:"contentinfo",Bc:"definition",Dc:"dialog",Ec:"directory",Gc:"document",Kc:"form",Mc:"grid",Nc:"gridcell",Oc:"group",Qc:"heading",Sc:"img",Xc:"link",Yc:"list",Zc:"listbox",$c:"listitem",bd:"log",cd:"main",dd:"marquee",ed:"math",fd:"menu",gd:"menubar",hd:"menuitem",jd:"menuitemcheckbox",kd:"menuitemradio",nd:"navigation",od:"note",pd:"option",
-td:"presentation",vd:"progressbar",wd:"radio",xd:"radiogroup",zd:"region",Cd:"row",Dd:"rowgroup",Ed:"rowheader",Fd:"scrollbar",Gd:"search",Id:"separator",Kd:"slider",Md:"spinbutton",Nd:"status",Od:"tab",Pd:"tablist",Qd:"tabpanel",Rd:"textbox",Sd:"timer",Td:"toolbar",Ud:"tooltip",Vd:"tree",Wd:"treegrid",Xd:"treeitem"};var sc=function(a,b){b?(t(Ga(rc,b),"No such ARIA role "+b),a.setAttribute("role",b)):a.removeAttribute("role")},uc=function(a,b,c){ea(c)&&(c=c.join(" "));var d=tc(b);""===c||void 0==c?(pc||(pc={atomic:!1,autocomplete:"none",dropeffect:"none",haspopup:!1,live:"off",multiline:!1,multiselectable:!1,orientation:"vertical",readonly:!1,relevant:"additions text",required:!1,sort:"none",busy:!1,disabled:!1,hidden:!1,invalid:"false"}),c=pc,b in c?a.setAttribute(d,c[b]):a.removeAttribute(d)):a.setAttribute(d,
-c)},tc=function(a){t(a,"ARIA attribute cannot be empty.");t(Ga(qc,a),"No such ARIA attribute "+a);return"aria-"+a};var wc=function(a,b,c,d,e){if(!(x||z&&B("525")))return!0;if(A&&e)return vc(a);if(e&&!d||!c&&(17==b||18==b||A&&91==b))return!1;if(z&&d&&c)switch(a){case 220:case 219:case 221:case 192:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:return!1}if(x&&d&&b==a)return!1;switch(a){case 13:return!(x&&x&&9<=db);case 27:return!z}return vc(a)},vc=function(a){if(48<=a&&57>=a||96<=a&&106>=a||65<=a&&90>=a||z&&0==a)return!0;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 59:case 189:case 187:case 61:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return!0;
-default:return!1}},xc=function(a){switch(a){case 61:return 187;case 59:return 186;case 224:return 91;case 0:return 224;default:return a}};var P=function(a,b){K.call(this);a&&yc(this,a,b)};s(P,K);f=P.prototype;f.c=null;f.Ea=null;f.Va=null;f.Fa=null;f.s=-1;f.R=-1;f.gb=!1;
-var zc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},Ac={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},Bc=x||z&&B("525"),Cc=A&&y;
-P.prototype.Qb=function(a){z&&(17==this.s&&!a.ctrlKey||18==this.s&&!a.altKey||A&&91==this.s&&!a.metaKey)&&(this.R=this.s=-1);-1==this.s&&(a.ctrlKey&&17!=a.keyCode?this.s=17:a.altKey&&18!=a.keyCode?this.s=18:a.metaKey&&91!=a.keyCode&&(this.s=91));Bc&&!wc(a.keyCode,this.s,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.R=y?xc(a.keyCode):a.keyCode,Cc&&(this.gb=a.altKey))};P.prototype.Rb=function(a){this.R=this.s=-1;this.gb=a.altKey};
-P.prototype.handleEvent=function(a){var b=a.T,c,d,e=b.altKey;x&&"keypress"==a.type?(c=this.R,d=13!=c&&27!=c?b.keyCode:0):z&&"keypress"==a.type?(c=this.R,d=0<=b.charCode&&63232>b.charCode&&vc(c)?b.charCode:0):Sa?(c=this.R,d=vc(c)?b.keyCode:0):(c=b.keyCode||this.R,d=b.charCode||0,Cc&&(e=this.gb),A&&(63==d&&224==c)&&(c=191));var g=c,h=b.keyIdentifier;c?63232<=c&&c in zc?g=zc[c]:25==c&&a.shiftKey&&(g=9):h&&h in Ac&&(g=Ac[h]);a=g==this.s;this.s=g;b=new Dc(g,d,a,b);b.altKey=e;this.dispatchEvent(b)};
-P.prototype.a=function(){return this.c};var yc=function(a,b,c){a.Fa&&a.detach();a.c=b;a.Ea=G(a.c,"keypress",a,c);a.Va=G(a.c,"keydown",a.Qb,c,a);a.Fa=G(a.c,"keyup",a.Rb,c,a)};P.prototype.detach=function(){this.Ea&&(I(this.Ea),I(this.Va),I(this.Fa),this.Fa=this.Va=this.Ea=null);this.c=null;this.R=this.s=-1};var Dc=function(a,b,c,d){d&&Fb(this,d,void 0);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};s(Dc,E);var Fc=function(a,b){if(!a)throw Error("Invalid class name "+a);if(!q(b))throw Error("Invalid decorator function "+b);Ec[a]=b},Gc={},Ec={};var Q=function(){};ba(Q);Q.prototype.$=function(){};var Hc=function(a,b){a&&(a.tabIndex=b?0:-1)};f=Q.prototype;f.q=function(a){return a.ib().q("div",this.wa(a).join(" "))};f.B=function(a){return a};f.da=function(a){return"DIV"==a.tagName};f.O=function(a,b){b.id&&ic(a,b.id);var c=this.w(),d=!1,e=hb(b);e&&xa(e,function(b){b==c?d=!0:b&&this.Za(a,b,c)},this);d||C(b,c);Ic(a,this.B(b));return b};
-f.Za=function(a,b,c){b==c+"-disabled"?a.sa(!1):b==c+"-horizontal"?Jc(a,"horizontal"):b==c+"-vertical"&&Jc(a,"vertical")};var Ic=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;t:{e=void 0;for(var g=hb(c),h=0,k=g.length;h<k;h++)if(e=g[h]in Ec?Ec[g[h]]():null)break t;e=null}e&&(e.c=c,a.isEnabled()||e.sa(!1),a.Da(e),e.O(c))}else c.nodeValue&&""!=ma(c.nodeValue)||b.removeChild(c);c=d}};
-Q.prototype.La=function(a){a=a.a();t(a,"The container DOM element cannot be null.");dc(a,!0,y);x&&(a.hideFocus=!0);var b=this.$();b&&sc(a,b)};Q.prototype.k=function(a){return a.a()};Q.prototype.w=function(){return"goog-container"};Q.prototype.wa=function(a){var b=this.w(),c=[b,"horizontal"==a.P?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var R=function(){},Kc;ba(R);f=R.prototype;f.$=function(){};f.q=function(a){var b=a.ib().q("div",this.wa(a).join(" "),a.Ca);Lc(a,b);return b};f.B=function(a){return a};f.ua=function(a,b,c){if(a=a.a?a.a():a)if(x&&!B("7")){var d=Mc(hb(a),b);d.push(b);ia(c?C:jb,a).apply(null,d)}else c?C(a,b):jb(a,b)};f.da=function(){return!0};
-f.O=function(a,b){b.id&&ic(a,b.id);var c=this.B(b);c&&c.firstChild?Nc(a,c.firstChild.nextSibling?Ca(c.childNodes):c.firstChild):a.Ca=null;var d=0,e=this.w(),g=this.w(),h=!1,k=!1,c=!1,l=hb(b);xa(l,function(a){if(h||a!=e)if(k||a!=g){var b=d;this.rb||(this.Ga||Oc(this),this.rb=Ia(this.Ga));a=parseInt(this.rb[a],10);d=b|(isNaN(a)?0:a)}else k=!0;else h=!0,g==e&&(k=!0)},this);a.g=d;h||(l.push(e),g==e&&(k=!0));k||l.push(g);var H=a.I;H&&l.push.apply(l,H);if(x&&!B("7")){var n=Mc(l);0<n.length&&(l.push.apply(l,
-n),c=!0)}if(!h||!k||H||c)b.className=l.join(" ");Lc(a,b);return b};f.La=function(a){nc(a)&&this.ta(a.a(),!0);a.isEnabled()&&this.qa(a,a.t())};var Pc=function(a,b,c){if(a=c||a.$())t(b,"The element passed as a first parameter cannot be null."),sc(b,a)},Lc=function(a,b){t(a);t(b);a.t()||uc(b,"hidden",!a.t());a.isEnabled()||Qc(b,1,!a.isEnabled());a.m&8&&Qc(b,8,!!(a.g&8));a.m&16&&Qc(b,16,!!(a.g&16));a.m&64&&Qc(b,64,!!(a.g&64))};f=R.prototype;f.Aa=function(a,b){dc(a,!b,!x&&!Sa)};
-f.ta=function(a,b){this.ua(a,this.w()+"-rtl",b)};f.W=function(a){var b;return a.m&32&&(b=a.k())?yb(b):!1};f.qa=function(a,b){var c;if(a.m&32&&(c=a.k())){if(!b&&a.g&32){try{c.blur()}catch(d){}a.g&32&&a.oa(null)}yb(c)!=b&&(b?c.tabIndex=0:(c.tabIndex=-1,c.removeAttribute("tabIndex")))}};f.ma=function(a,b){L(a,b);a&&uc(a,"hidden",!b)};f.v=function(a,b,c){var d=a.a();if(d){var e=Rc(this,b);e&&this.ua(a,e,c);Qc(d,b,c)}};
-var Qc=function(a,b,c){Kc||(Kc={1:"disabled",8:"selected",16:"checked",64:"expanded"});if(b=Kc[b])t(a,"The element passed as a first parameter cannot be null."),uc(a,b,c)};R.prototype.k=function(a){return a.a()};R.prototype.w=function(){return"goog-control"};R.prototype.wa=function(a){var b=this.w(),c=[b],d=this.w();d!=b&&c.push(d);b=a.g;for(d=[];b;){var e=b&-b;d.push(Rc(this,e));b&=~e}c.push.apply(c,d);(a=a.I)&&c.push.apply(c,a);x&&!B("7")&&c.push.apply(c,Mc(c));return c};
-var Mc=function(a,b){var c=[];b&&(a=a.concat([b]));xa([],function(d){!za(d,ia(w,a))||b&&!w(d,b)||c.push(d.join("_"))});return c},Rc=function(a,b){a.Ga||Oc(a);return a.Ga[b]},Oc=function(a){var b=a.w();a.Ga={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+"-open"}};var S=function(a,b,c){N.call(this,c);if(!b){b=this.constructor;for(var d;b;){d=r(b);if(d=Gc[d])break;b=b.f?b.f.constructor:null}b=d?q(d.ea)?d.ea():new d:null}this.b=b;this.Ca=a};s(S,N);f=S.prototype;f.Ca=null;f.g=0;f.m=39;f.cc=255;f.aa=0;f.p=!0;f.I=null;f.fa=!0;f.ya=!1;f.ob=null;f.mb=function(){return this.fa};f.Ma=function(a){this.e&&a!=this.fa&&Sc(this,a);this.fa=a};f.k=function(){return this.b.k(this)};f.za=function(){return this.ka||(this.ka=new P)};f.Ab=function(){return this.b};
-f.ua=function(a,b){b?a&&(this.I?w(this.I,a)||this.I.push(a):this.I=[a],this.b.ua(this,a,!0)):a&&(this.I&&Ba(this.I,a))&&(0==this.I.length&&(this.I=null),this.b.ua(this,a,!1))};f.q=function(){var a=this.b.q(this);this.c=a;Pc(this.b,a,this.ob);this.ya||this.b.Aa(a,!1);this.t()||this.b.ma(a,!1)};f.B=function(){return this.b.B(this.a())};f.da=function(a){return this.b.da(a)};f.Wa=function(a){this.c=a=this.b.O(this,a);Pc(this.b,a,this.ob);this.ya||this.b.Aa(a,!1);this.p="none"!=a.style.display};
-f.F=function(){S.f.F.call(this);this.b.La(this);if(this.m&-2&&(this.mb()&&Sc(this,!0),this.m&32)){var a=this.k();if(a){var b=this.za();yc(b,a);jc(this).d(b,"key",this.N).d(a,"focus",this.pa).d(a,"blur",this.oa)}}};
-var Sc=function(a,b){var c=jc(a),d=a.a();b?(c.d(d,"mouseover",a.Pa).d(d,"mousedown",a.na).d(d,"mouseup",a.Qa).d(d,"mouseout",a.Oa),a.ra!=aa&&c.d(d,"contextmenu",a.ra),x&&c.d(d,"dblclick",a.qb)):(c.u(d,"mouseover",a.Pa).u(d,"mousedown",a.na).u(d,"mouseup",a.Qa).u(d,"mouseout",a.Oa),a.ra!=aa&&c.u(d,"contextmenu",a.ra),x&&c.u(d,"dblclick",a.qb))};S.prototype.ga=function(){S.f.ga.call(this);this.ka&&this.ka.detach();this.t()&&this.isEnabled()&&this.b.qa(this,!1)};var Nc=function(a,b){a.Ca=b};f=S.prototype;
-f.ta=function(a){S.f.ta.call(this,a);var b=this.a();b&&this.b.ta(b,a)};f.Aa=function(a){this.ya=a;var b=this.a();b&&this.b.Aa(b,a)};f.t=function(){return this.p};f.ma=function(a,b){if(b||this.p!=a&&this.dispatchEvent(a?"show":"hide")){var c=this.a();c&&this.b.ma(c,a);this.isEnabled()&&this.b.qa(this,a);this.p=a;return!0}return!1};f.isEnabled=function(){return!(this.g&1)};
-f.sa=function(a){var b=this.getParent();b&&"function"==typeof b.isEnabled&&!b.isEnabled()||!T(this,1,!a)||(a||(this.setActive(!1),this.C(!1)),this.t()&&this.b.qa(this,a),this.v(1,!a))};f.C=function(a){T(this,2,a)&&this.v(2,a)};f.setActive=function(a){T(this,4,a)&&this.v(4,a)};var Tc=function(a,b){T(a,8,b)&&a.v(8,b)},Uc=function(a,b){T(a,64,b)&&a.v(64,b)};S.prototype.v=function(a,b){this.m&a&&b!=!!(this.g&a)&&(this.b.v(this,a,b),this.g=b?this.g|a:this.g&~a)};
-var Vc=function(a,b,c){if(a.e&&a.g&b&&!c)throw Error("Component already rendered");!c&&a.g&b&&a.v(b,!1);a.m=c?a.m|b:a.m&~b},U=function(a,b){return!!(a.cc&b)&&!!(a.m&b)},T=function(a,b,c){return!!(a.m&b)&&!!(a.g&b)!=c&&(!(a.aa&b)||a.dispatchEvent(gc(b,c)))&&!a.Sb};f=S.prototype;f.Pa=function(a){(!a.relatedTarget||!ub(this.a(),a.relatedTarget))&&(this.dispatchEvent("enter")&&this.isEnabled()&&U(this,2))&&this.C(!0)};
-f.Oa=function(a){a.relatedTarget&&ub(this.a(),a.relatedTarget)||!this.dispatchEvent("leave")||(U(this,4)&&this.setActive(!1),U(this,2)&&this.C(!1))};f.ra=aa;f.na=function(a){this.isEnabled()&&(U(this,2)&&this.C(!0),!Hb(a)||z&&A&&a.ctrlKey||(U(this,4)&&this.setActive(!0),this.b.W(this)&&this.k().focus()));this.ya||(!Hb(a)||z&&A&&a.ctrlKey)||a.preventDefault()};f.Qa=function(a){this.isEnabled()&&(U(this,2)&&this.C(!0),this.g&4&&(Wc(this,a)&&U(this,4))&&this.setActive(!1))};
-f.qb=function(a){this.isEnabled()&&Wc(this,a)};var Wc=function(a,b){if(U(a,16)){var c=!(a.g&16);T(a,16,c)&&a.v(16,c)}U(a,8)&&Tc(a,!0);U(a,64)&&Uc(a,!(a.g&64));c=new D("action",a);b&&(c.altKey=b.altKey,c.ctrlKey=b.ctrlKey,c.metaKey=b.metaKey,c.shiftKey=b.shiftKey,c.$a=b.$a);return a.dispatchEvent(c)};S.prototype.pa=function(){U(this,32)&&T(this,32,!0)&&this.v(32,!0)};S.prototype.oa=function(){U(this,4)&&this.setActive(!1);U(this,32)&&T(this,32,!1)&&this.v(32,!1)};
-S.prototype.N=function(a){return this.t()&&this.isEnabled()&&this.jb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};S.prototype.jb=function(a){return 13==a.keyCode&&Wc(this,a)};if(!q(S))throw Error("Invalid component class "+S);if(!q(R))throw Error("Invalid renderer class "+R);var Xc=r(S);Gc[Xc]=R;Fc("goog-control",function(){return new S(null)});var V=function(a,b,c){N.call(this,c);this.b=b||Q.ea();this.P=a||"vertical"};s(V,N);f=V.prototype;f.sb=null;f.ka=null;f.b=null;f.P=null;f.p=!0;f.ba=!0;f.Ya=!0;f.j=-1;f.h=null;f.ha=!1;f.Pb=!1;f.Ob=!0;f.Q=null;f.k=function(){return this.sb||this.b.k(this)};f.za=function(){return this.ka||(this.ka=new P(this.k()))};f.Ab=function(){return this.b};f.q=function(){this.c=this.b.q(this)};f.B=function(){return this.b.B(this.a())};f.da=function(a){return this.b.da(a)};
-f.Wa=function(a){this.c=this.b.O(this,a);"none"==a.style.display&&(this.p=!1)};f.F=function(){V.f.F.call(this);lc(this,function(a){a.e&&Yc(this,a)},this);var a=this.a();this.b.La(this);this.ma(this.p,!0);jc(this).d(this,"enter",this.Ib).d(this,"highlight",this.Jb).d(this,"unhighlight",this.Lb).d(this,"open",this.Kb).d(this,"close",this.Gb).d(a,"mousedown",this.na).d(lb(a),"mouseup",this.Hb).d(a,["mousedown","mouseup","mouseover","mouseout","contextmenu"],this.Fb);this.W()&&Zc(this,!0)};
-var Zc=function(a,b){var c=jc(a),d=a.k();b?c.d(d,"focus",a.pa).d(d,"blur",a.oa).d(a.za(),"key",a.N):c.u(d,"focus",a.pa).u(d,"blur",a.oa).u(a.za(),"key",a.N)};f=V.prototype;f.ga=function(){$c(this,-1);this.h&&Uc(this.h,!1);this.ha=!1;V.f.ga.call(this)};f.Ib=function(){return!0};
-f.Jb=function(a){var b=oc(this,a.target);if(-1<b&&b!=this.j){var c=O(this,this.j);c&&c.C(!1);this.j=b;c=O(this,this.j);this.ha&&c.setActive(!0);this.Ob&&(this.h&&c!=this.h)&&(c.m&64?Uc(c,!0):Uc(this.h,!1))}b=this.a();t(b,"The DOM element for the container cannot be null.");null!=a.target.a()&&uc(b,"activedescendant",a.target.a().id)};f.Lb=function(a){a.target==O(this,this.j)&&(this.j=-1);a=this.a();t(a,"The DOM element for the container cannot be null.");a.removeAttribute(tc("activedescendant"))};
-f.Kb=function(a){(a=a.target)&&(a!=this.h&&a.getParent()==this)&&(this.h&&Uc(this.h,!1),this.h=a)};f.Gb=function(a){a.target==this.h&&(this.h=null)};f.na=function(a){this.ba&&(this.ha=!0);var b=this.k();b&&yb(b)?b.focus():a.preventDefault()};f.Hb=function(){this.ha=!1};
-f.Fb=function(a){var b;t:{b=a.target;if(this.Q)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.Q){b=this.Q[d];break t}b=b.parentNode}b=null}if(b)switch(a.type){case "mousedown":b.na(a);break;case "mouseup":b.Qa(a);break;case "mouseover":b.Pa(a);break;case "mouseout":b.Oa(a);break;case "contextmenu":b.ra(a)}};f.pa=function(){};f.oa=function(){$c(this,-1);this.ha=!1;this.h&&Uc(this.h,!1)};
-f.N=function(a){return this.isEnabled()&&this.t()&&(0!=mc(this)||this.sb)&&this.jb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};
-f.jb=function(a){var b=O(this,this.j);if(b&&"function"==typeof b.N&&b.N(a)||this.h&&this.h!=b&&"function"==typeof this.h.N&&this.h.N(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.W())this.k().blur();else return!1;break;case 36:ad(this);break;case 35:bd(this);break;case 38:if("vertical"==this.P)cd(this);else return!1;break;case 37:if("horizontal"==this.P)nc(this)?dd(this):cd(this);else return!1;break;case 40:if("vertical"==this.P)dd(this);else return!1;
-break;case 39:if("horizontal"==this.P)nc(this)?cd(this):dd(this);else return!1;break;default:return!1}return!0};var Yc=function(a,b){var c=b.a(),c=c.id||(c.id=hc(b));a.Q||(a.Q={});a.Q[c]=b};V.prototype.Da=function(a,b){va(a,S,"The child of a container must be a control");V.f.Da.call(this,a,b)};V.prototype.Sa=function(a,b,c){a.aa|=2;a.aa|=64;!this.W()&&this.Pb||Vc(a,32,!1);a.Ma(!1);V.f.Sa.call(this,a,b,c);a.e&&this.e&&Yc(this,a);b<=this.j&&this.j++};
-V.prototype.removeChild=function(a,b){if(a=p(a)?this.i&&a?(a in this.i?this.i[a]:void 0)||null:null:a){var c=oc(this,a);-1!=c&&(c==this.j?a.C(!1):c<this.j&&this.j--);var d=a.a();d&&(d.id&&this.Q)&&(c=this.Q,d=d.id,d in c&&delete c[d])}a=V.f.removeChild.call(this,a,b);a.Ma(!0);return a};var Jc=function(a,b){if(a.a())throw Error("Component already rendered");a.P=b};f=V.prototype;f.t=function(){return this.p};
-f.ma=function(a,b){if(b||this.p!=a&&this.dispatchEvent(a?"show":"hide")){this.p=a;var c=this.a();c&&(L(c,a),this.W()&&Hc(this.k(),this.ba&&this.p),b||this.dispatchEvent(this.p?"aftershow":"afterhide"));return!0}return!1};f.isEnabled=function(){return this.ba};f.sa=function(a){this.ba!=a&&this.dispatchEvent(a?"enable":"disable")&&(a?(this.ba=!0,lc(this,function(a){a.ub?delete a.ub:a.sa(!0)})):(lc(this,function(a){a.isEnabled()?a.sa(!1):a.ub=!0}),this.ha=this.ba=!1),this.W()&&Hc(this.k(),a&&this.p))};
-f.W=function(){return this.Ya};f.qa=function(a){a!=this.Ya&&this.e&&Zc(this,a);this.Ya=a;this.ba&&this.p&&Hc(this.k(),a)};var $c=function(a,b){var c=O(a,b);c?c.C(!0):-1<a.j&&O(a,a.j).C(!1)};V.prototype.C=function(a){$c(this,oc(this,a))};
-var ad=function(a){ed(a,function(a,c){return(a+1)%c},mc(a)-1)},bd=function(a){ed(a,function(a,c){a--;return 0>a?c-1:a},0)},dd=function(a){ed(a,function(a,c){return(a+1)%c},a.j)},cd=function(a){ed(a,function(a,c){a--;return 0>a?c-1:a},a.j)},ed=function(a,b,c){c=0>c?oc(a,a.h):c;var d=mc(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=O(a,c);if(g&&g.t()&&g.isEnabled()&&g.m&2){a.Ta(c);break}e++;c=b.call(a,c,d)}};V.prototype.Ta=function(a){$c(this,a)};var fd=function(){};s(fd,R);ba(fd);f=fd.prototype;f.w=function(){return"goog-tab"};f.$=function(){return"tab"};f.q=function(a){var b=fd.f.q.call(this,a);(a=a.Ra())&&this.Ua(b,a);return b};f.O=function(a,b){b=fd.f.O.call(this,a,b);var c=this.Ra(b);c&&(a.pb=c);a.g&8&&(c=a.getParent())&&q(c.ca)&&(a.v(8,!1),c.ca(a));return b};f.Ra=function(a){return a.title||""};f.Ua=function(a,b){a&&(a.title=b||"")};var gd=function(a,b,c){S.call(this,a,b||fd.ea(),c);Vc(this,8,!0);this.aa|=9};s(gd,S);gd.prototype.Ra=function(){return this.pb};gd.prototype.Ua=function(a){this.Ab().Ua(this.a(),a);this.pb=a};Fc("goog-tab",function(){return new gd(null)});var W=function(){};s(W,Q);ba(W);W.prototype.w=function(){return"goog-tab-bar"};W.prototype.$=function(){return"tablist"};W.prototype.Za=function(a,b,c){this.Bb||(this.Ia||hd(this),this.Bb=Ia(this.Ia));var d=this.Bb[b];d?(Jc(a,id(d)),a.vb=d):W.f.Za.call(this,a,b,c)};W.prototype.wa=function(a){var b=W.f.wa.call(this,a);this.Ia||hd(this);b.push(this.Ia[a.vb]);return b};var hd=function(a){var b=a.w();a.Ia={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var X=function(a,b,c){a=a||"top";Jc(this,id(a));this.vb=a;V.call(this,this.P,b||W.ea(),c);jd(this)};s(X,V);f=X.prototype;f.Zb=!0;f.K=null;f.F=function(){X.f.F.call(this);jd(this)};f.removeChild=function(a,b){kd(this,a);return X.f.removeChild.call(this,a,b)};f.Ta=function(a){X.f.Ta.call(this,a);this.Zb&&this.ca(O(this,a))};f.ca=function(a){a?Tc(a,!0):this.K&&Tc(this.K,!1)};
-var kd=function(a,b){if(b&&b==a.K){for(var c=oc(a,b),d=c-1;b=O(a,d);d--)if(b.t()&&b.isEnabled()){a.ca(b);return}for(c+=1;b=O(a,c);c++)if(b.t()&&b.isEnabled()){a.ca(b);return}a.ca(null)}};f=X.prototype;f.Xb=function(a){this.K&&this.K!=a.target&&Tc(this.K,!1);this.K=a.target};f.Yb=function(a){a.target==this.K&&(this.K=null)};f.Vb=function(a){kd(this,a.target)};f.Wb=function(a){kd(this,a.target)};f.pa=function(){O(this,this.j)||this.C(this.K||O(this,0))};
-var jd=function(a){jc(a).d(a,"select",a.Xb).d(a,"unselect",a.Yb).d(a,"disable",a.Vb).d(a,"hide",a.Wb)},id=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Fc("goog-tab-bar",function(){return new X});var Y=function(a,b,c,d,e){function g(a){a&&(a.tabIndex=0,sc(a,h.$()),C(a,"goog-zippy-header"),ld(h,a),a&&h.Mb.d(a,"keydown",h.Nb))}K.call(this);this.n=e||mb();this.X=this.n.a(a)||null;this.Ba=this.n.a(d||null);this.ia=(this.Na=q(b)?b:null)||!b?null:this.n.a(b);this.l=!0==c;this.Mb=new J(this);this.nb=new J(this);var h=this;g(this.X);g(this.Ba);this.Y(this.l)};s(Y,K);f=Y.prototype;f.fa=!0;f.$=function(){return"tab"};f.B=function(){return this.ia};f.toggle=function(){this.Y(!this.l)};
-f.Y=function(a){this.ia?L(this.ia,a):a&&this.Na&&(this.ia=this.Na());this.ia&&C(this.ia,"goog-zippy-content");if(this.Ba)L(this.X,!a),L(this.Ba,a);else if(this.X){var b=this.X;a?C(b,"goog-zippy-expanded"):jb(b,"goog-zippy-expanded");b=this.X;a?jb(b,"goog-zippy-collapsed"):C(b,"goog-zippy-collapsed");uc(this.X,"expanded",a)}this.l=a;this.dispatchEvent(new md("toggle",this))};f.mb=function(){return this.fa};f.Ma=function(a){this.fa!=a&&((this.fa=a)?(ld(this,this.X),ld(this,this.Ba)):$b(this.nb))};
-var ld=function(a,b){b&&a.nb.d(b,"click",a.$b)};Y.prototype.Nb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new D("action",this)),a.preventDefault(),a.stopPropagation()};Y.prototype.$b=function(){this.toggle();this.dispatchEvent(new D("action",this))};var md=function(a,b){D.call(this,a,b)};s(md,D);var Z=function(a,b){this.lb=[];for(var c=nb(a),c=ob("span","ae-zippy",c),d=0,e;e=c[d];d++){var g;if(void 0!=e.parentNode.parentNode.parentNode.nextElementSibling)g=e.parentNode.parentNode.parentNode.nextElementSibling;else for(g=e.parentNode.parentNode.parentNode.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new Y(e,g,!1);this.lb.push(e)}this.fc=new nd(this.lb,nb(b))};Z.prototype.ic=function(){return this.fc};Z.prototype.jc=function(){return this.lb};
-var nd=function(a,b){this.xa=a;if(this.xa.length)for(var c=0,d;d=this.xa[c];c++)G(d,"toggle",this.Ub,!1,this);this.Ja=0;this.l=!1;c="ae-toggle ae-plus ae-action";this.xa.length||(c+=" ae-disabled");this.V=sb("span",{className:c},"Expand All");G(this.V,"click",this.Tb,!1,this);b&&b.appendChild(this.V)};nd.prototype.Tb=function(){this.xa.length&&this.Y(!this.l)};
-nd.prototype.Ub=function(a){a=a.currentTarget;this.Ja=a.l?this.Ja+1:this.Ja-1;a.l!=this.l&&(a.l?(this.l=!0,od(this,!0)):0==this.Ja&&(this.l=!1,od(this,!1)))};nd.prototype.Y=function(a){this.l=a;a=0;for(var b;b=this.xa[a];a++)b.l!=this.l&&b.Y(this.l);od(this)};
-var od=function(a,b){(void 0!==b?b:a.l)?(jb(a.V,"ae-plus"),C(a.V,"ae-minus"),vb(a.V,"Collapse All")):(jb(a.V,"ae-minus"),C(a.V,"ae-plus"),vb(a.V,"Expand All"))},pd=function(a){this.ac=a;this.Db={};var b,c=sb("div",{},b=sb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),sb("div",{className:"goog-tab-bar-clear"}),a=sb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new X;d.O(b);G(d,"select",this.Cb,!1,this);G(d,"unselect",this.Cb,!1,this);b=
-0;for(var e;e=this.ac[b];b++)if(e=nb("ae-stats-details-"+e)){var g=ob("h2",null,e)[0],h;h=g;var k=void 0;gb&&"innerText"in h?k=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(k=[],zb(h,k,!0),k=k.join(""));k=k.replace(/ \xAD /g," ").replace(/\xAD/g,"");k=k.replace(/\u200B/g,"");gb||(k=k.replace(/ +/g," "));" "!=k&&(k=k.replace(/^\s*/,""));h=k;g&&g.parentNode&&g.parentNode.removeChild(g);g=new gd(h);this.Db[r(g)]=e;d.Da(g,!0);a.appendChild(e);0==b?d.ca(g):L(e,!1)}nb("bd").appendChild(c)};
-pd.prototype.Cb=function(a){var b=this.Db[r(a.target)];L(b,"select"==a.type)};ja("ae.Stats.Details.Tabs",pd);ja("goog.ui.Zippy",Y);Y.prototype.setExpanded=Y.prototype.Y;ja("ae.Stats.MakeZippys",Z);Z.prototype.getExpandCollapse=Z.prototype.ic;Z.prototype.getZippys=Z.prototype.jc;nd.prototype.setExpanded=nd.prototype.Y;var $=function(){this.ab=[];this.fb=[]},qd=[[5,0.2,1],[6,0.2,1.2],[5,0.25,1.25],[6,0.25,1.5],[4,0.5,2],[5,0.5,2.5],[6,0.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],rd=function(a){if(0>=a)return[2,0.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c<qd.length;c++)if(a<=qd[c][2])return[qd[c][0],qd[c][1]*b,qd[c][2]*b];return[5,2*b,10*b]};$.prototype.eb="stats/static/pix.gif";$.prototype.A="ae-stats-gantt-";$.prototype.cb=0;$.prototype.write=function(a){this.fb.push(a)};
-var sd=function(a,b,c,d){a.write('<tr class="'+a.A+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.A+'axis">');for(var e=0;e<=b;e++)a.write('<img class="'+a.A+'tick" src="'+a.eb+'" alt="" '),a.write('style="left:'+e*c*d+'%"\n>'),a.write('<span class="'+a.A+'scale" style="left:'+e*c*d+'%">'),a.write("&nbsp;"+e*c+"</span>");a.write("</div></td></tr>\n")};
-$.prototype.hc=function(){this.fb=[];var a=rd(this.cb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.A+'table">\n');sd(this,b,c,a);for(var d=0;d<this.ab.length;d++){var e=this.ab[d];this.write('<tr class="'+this.A+'datarow"><td width="20%">');0<e.label.length&&(0<e.la.length&&this.write('<a class="'+this.A+'link" href="'+e.la+'">'),this.write(e.label),0<e.la.length&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.A+'container">');0<e.la.length&&this.write('<a class="'+
-this.A+'link" href="'+e.la+'"\n>');this.write('<img class="'+this.A+'bar" src="'+this.eb+'" alt="" ');this.write('style="left:'+e.start*a+"%;width:"+e.duration*a+'%;min-width:1px"\n>');0<e.bb&&(this.write('<img class="'+this.A+'extra" src="'+this.eb+'" alt="" '),this.write('style="left:'+e.start*a+"%;width:"+e.bb*a+'%"\n>'));0<e.yb.length&&(this.write('<span class="'+this.A+'inline" style="left:'+(e.start+Math.max(e.duration,e.bb))*a+'%">&nbsp;'),this.write(e.yb),this.write("</span>"));0<e.la.length&&
-this.write("</a>");this.write("</div></td></tr>\n")}sd(this,b,c,a);this.write("</table>\n");return this.fb.join("")};$.prototype.gc=function(a,b,c,d,e,g){this.cb=Math.max(this.cb,Math.max(b+c,b+d));this.ab.push({label:a,start:b,duration:c,bb:d,yb:e,la:g})};ja("Gantt",$);$.prototype.add_bar=$.prototype.gc;$.prototype.draw=$.prototype.hc;})();
+/* Copyright 2008-10 Google Inc. All Rights Reserved. */ (function(){var f,l=this,aa=function(){},ba=function(a){a.ca=function(){return a.Eb?a.Eb:a.Eb=new a}},ca=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=
+typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a.call)return"object";return b},da=function(a){return"array"==ca(a)},ea=function(a){var b=ca(a);return"array"==b||"object"==b&&"number"==typeof a.length},n=function(a){return"string"==typeof a},p=function(a){return"function"==ca(a)},fa=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},s=function(a){return a[ga]||
+(a[ga]=++ha)},ga="closure_uid_"+(1E9*Math.random()>>>0),ha=0,ia=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=Array.prototype.slice.call(arguments);b.unshift.apply(b,c);return a.apply(this,b)}},ja=function(a,b){var c=a.split("."),d=l;c[0]in d||!d.execScript||d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)c.length||void 0===b?d=d[e]?d[e]:d[e]={}:d[e]=b},t=function(a,b){function c(){}c.prototype=b.prototype;a.f=b.prototype;a.prototype=new c;a.prototype.constructor=
+a};var ka=function(a){Error.captureStackTrace?Error.captureStackTrace(this,ka):this.stack=Error().stack||"";a&&(this.message=String(a))};t(ka,Error);ka.prototype.name="CustomError";var la=function(a,b){for(var c=a.split("%s"),d="",e=Array.prototype.slice.call(arguments,1);e.length&&1<c.length;)d+=c.shift()+e.shift();return d+c.join("%s")},ma=function(a){return a.replace(/^[\s\xa0]+|[\s\xa0]+$/g,"")},sa=function(a){if(!na.test(a))return a;-1!=a.indexOf("&")&&(a=a.replace(oa,"&amp;"));-1!=a.indexOf("<")&&(a=a.replace(pa,"&lt;"));-1!=a.indexOf(">")&&(a=a.replace(qa,"&gt;"));-1!=a.indexOf('"')&&(a=a.replace(ra,"&quot;"));return a},oa=/&/g,pa=/</g,qa=/>/g,ra=/\"/g,na=/[&<>\"]/;var ta=function(a,b){b.unshift(a);ka.call(this,la.apply(null,b));b.shift()};t(ta,ka);ta.prototype.name="AssertionError";var ua=function(a,b,c){var d="Assertion failed";if(b)var d=d+(": "+b),e=c;else a&&(d+=": "+a,e=null);throw new ta(""+d,e||[]);},u=function(a,b,c){a||ua("",b,Array.prototype.slice.call(arguments,2))},va=function(a,b,c,d){a instanceof b||ua("instanceof check failed.",c,Array.prototype.slice.call(arguments,3))};var w=Array.prototype,wa=w.indexOf?function(a,b,c){u(null!=a.length);return w.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(n(a))return n(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},xa=w.forEach?function(a,b,c){u(null!=a.length);w.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=n(a)?a.split(""):a,g=0;g<d;g++)g in e&&b.call(c,e[g],g,a)},ya=w.filter?function(a,b,c){u(null!=a.length);return w.filter.call(a,
+b,c)}:function(a,b,c){for(var d=a.length,e=[],g=0,h=n(a)?a.split(""):a,k=0;k<d;k++)if(k in h){var m=h[k];b.call(c,m,k,a)&&(e[g++]=m)}return e},za=w.every?function(a,b,c){u(null!=a.length);return w.every.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=n(a)?a.split(""):a,g=0;g<d;g++)if(g in e&&!b.call(c,e[g],g,a))return!1;return!0},x=function(a,b){return 0<=wa(a,b)},y=function(a,b){var c=wa(a,b),d;if(d=0<=c)u(null!=a.length),w.splice.call(a,c,1);return d},Aa=function(a){var b=a.length;if(0<b){for(var c=
+Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},Ca=function(a,b,c,d){u(null!=a.length);w.splice.apply(a,Ba(arguments,1))},Ba=function(a,b,c){u(null!=a.length);return 2>=arguments.length?w.slice.call(a,b):w.slice.call(a,b,c)};var Da=function(a,b){for(var c in a)b.call(void 0,a[c],c,a)},Ea=function(a,b){for(var c in a)if(a[c]==b)return!0;return!1},Fa=function(a,b,c){if(b in a)throw Error('The object already contains the key "'+b+'"');a[b]=c},Ga=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Ha="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ia=function(a,b){for(var c,d,e=1;e<arguments.length;e++){d=arguments[e];for(c in d)a[c]=d[c];for(var g=0;g<Ha.length;g++)c=
+Ha[g],Object.prototype.hasOwnProperty.call(d,c)&&(a[c]=d[c])}};var Ja,Ka,La,Ma,Na=function(){return l.navigator?l.navigator.userAgent:null};Ma=La=Ka=Ja=!1;var Oa;if(Oa=Na()){var Pa=l.navigator;Ja=0==Oa.lastIndexOf("Opera",0);Ka=!Ja&&(-1!=Oa.indexOf("MSIE")||-1!=Oa.indexOf("Trident"));La=!Ja&&-1!=Oa.indexOf("WebKit");Ma=!Ja&&!La&&!Ka&&"Gecko"==Pa.product}var Qa=Ja,z=Ka,A=Ma,B=La,Ra=l.navigator,C=-1!=(Ra&&Ra.platform||"").indexOf("Mac"),Sa=function(){var a=l.document;return a?a.documentMode:void 0},Ta;
+t:{var Ua="",Va;if(Qa&&l.opera)var Wa=l.opera.version,Ua="function"==typeof Wa?Wa():Wa;else if(A?Va=/rv\:([^\);]+)(\)|;)/:z?Va=/\b(?:MSIE|rv)\s+([^\);]+)(\)|;)/:B&&(Va=/WebKit\/(\S+)/),Va)var Xa=Va.exec(Na()),Ua=Xa?Xa[1]:"";if(z){var Ya=Sa();if(Ya>parseFloat(Ua)){Ta=String(Ya);break t}}Ta=Ua}
+var Za=Ta,$a={},D=function(a){var b;if(!(b=$a[a])){b=0;for(var c=ma(String(Za)).split("."),d=ma(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g<e;g++){var h=c[g]||"",k=d[g]||"",m=RegExp("(\\d*)(\\D*)","g"),v=RegExp("(\\d*)(\\D*)","g");do{var r=m.exec(h)||["","",""],q=v.exec(k)||["","",""];if(0==r[0].length&&0==q[0].length)break;b=((0==r[1].length?0:parseInt(r[1],10))<(0==q[1].length?0:parseInt(q[1],10))?-1:(0==r[1].length?0:parseInt(r[1],10))>(0==q[1].length?0:parseInt(q[1],10))?1:
+0)||((0==r[2].length)<(0==q[2].length)?-1:(0==r[2].length)>(0==q[2].length)?1:0)||(r[2]<q[2]?-1:r[2]>q[2]?1:0)}while(0==b)}b=$a[a]=0<=b}return b},ab=l.document,bb=ab&&z?Sa()||("CSS1Compat"==ab.compatMode?parseInt(Za,10):5):void 0;var cb,db=!z||z&&9<=bb;!A&&!z||z&&z&&9<=bb||A&&D("1.9.1");var eb=z&&!D("9");var fb=function(a){a=a.className;return n(a)&&a.match(/\S+/g)||[]},E=function(a,b){for(var c=fb(a),d=Ba(arguments,1),e=c.length+d.length,g=c,h=0;h<d.length;h++)x(g,d[h])||g.push(d[h]);a.className=c.join(" ");return c.length==e},hb=function(a,b){var c=fb(a),d=Ba(arguments,1),e=gb(c,d);a.className=e.join(" ");return e.length==c.length-d.length},gb=function(a,b){return ya(a,function(a){return!x(b,a)})};var kb=function(a){return a?new ib(jb(a)):cb||(cb=new ib)},lb=function(a){return n(a)?document.getElementById(a):a},mb=function(a,b,c){var d=document;c=c||d;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var d={},e=0,g=0,h;h=c[g];g++)a==h.nodeName&&(d[e++]=h);d.length=e;return d}return c}c=c.getElementsByTagName(a||"*");if(b){d={};for(g=e=0;h=c[g];g++)a=
+h.className,"function"==typeof a.split&&x(a.split(/\s+/),b)&&(d[e++]=h);d.length=e;return d}return c},ob=function(a,b){Da(b,function(b,d){"style"==d?a.style.cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in nb?a.setAttribute(nb[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},nb={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",
+usemap:"useMap",valign:"vAlign",width:"width"},qb=function(a,b,c){return pb(document,arguments)},pb=function(a,b){var c=b[0],d=b[1];if(!db&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',sa(d.name),'"');if(d.type){c.push(' type="',sa(d.type),'"');var e={};Ia(e,d);delete e.type;d=e}c.push(">");c=c.join("")}c=a.createElement(c);d&&(n(d)?c.className=d:da(d)?E.apply(null,[c].concat(d)):ob(c,d));2<b.length&&rb(a,c,b);return c},rb=function(a,b,c){function d(c){c&&b.appendChild(n(c)?a.createTextNode(c):
+c)}for(var e=2;e<c.length;e++){var g=c[e];if(!ea(g)||fa(g)&&0<g.nodeType)d(g);else{var h;t:{if(g&&"number"==typeof g.length){if(fa(g)){h="function"==typeof g.item||"string"==typeof g.item;break t}if(p(g)){h="function"==typeof g.item;break t}}h=!1}xa(h?Aa(g):g,d)}}},sb=function(a,b){if(a.contains&&1==b.nodeType)return a==b||a.contains(b);if("undefined"!=typeof a.compareDocumentPosition)return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b.parentNode;return b==a},jb=function(a){return 9==
+a.nodeType?a:a.ownerDocument||a.document},tb=function(a,b){if("textContent"in a)a.textContent=b;else if(a.firstChild&&3==a.firstChild.nodeType){for(;a.lastChild!=a.firstChild;)a.removeChild(a.lastChild);a.firstChild.data=b}else{for(var c;c=a.firstChild;)a.removeChild(c);a.appendChild(jb(a).createTextNode(String(b)))}},ub={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},vb={IMG:" ",BR:"\n"},wb=function(a){var b=a.getAttributeNode("tabindex");return b&&b.specified?(a=a.tabIndex,"number"==typeof a&&0<=a&&
+32768>a):!1},xb=function(a,b,c){if(!(a.nodeName in ub))if(3==a.nodeType)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in vb)b.push(vb[a.nodeName]);else for(a=a.firstChild;a;)xb(a,b,c),a=a.nextSibling},ib=function(a){this.F=a||l.document||document};f=ib.prototype;f.jb=kb;f.a=function(a){return n(a)?this.F.getElementById(a):a};f.r=function(a,b,c){return pb(this.F,arguments)};f.createElement=function(a){return this.F.createElement(a)};
+f.createTextNode=function(a){return this.F.createTextNode(String(a))};f.appendChild=function(a,b){a.appendChild(b)};f.contains=sb;var yb=function(a){yb[" "](a);return a};yb[" "]=aa;var zb=!z||z&&9<=bb,Ab=!z||z&&9<=bb,Bb=z&&!D("9");!B||D("528");A&&D("1.9b")||z&&D("8")||Qa&&D("9.5")||B&&D("528");A&&!D("8")||z&&D("9");var Cb=function(){};Cb.prototype.Sb=!1;var F=function(a,b){this.type=a;this.currentTarget=this.target=b};f=F.prototype;f.T=!1;f.defaultPrevented=!1;f.wb=!0;f.stopPropagation=function(){this.T=!0};f.preventDefault=function(){this.defaultPrevented=!0;this.wb=!1};var G=function(a,b){a&&Db(this,a,b)};t(G,F);var Eb=[1,4,2];f=G.prototype;f.target=null;f.relatedTarget=null;f.offsetX=0;f.offsetY=0;f.clientX=0;f.clientY=0;f.screenX=0;f.screenY=0;f.button=0;f.keyCode=0;f.charCode=0;f.ctrlKey=!1;f.altKey=!1;f.shiftKey=!1;f.metaKey=!1;f.ab=!1;f.R=null;
+var Db=function(a,b,c){var d=a.type=b.type;F.call(a,d);a.target=b.target||b.srcElement;a.currentTarget=c;if(c=b.relatedTarget){if(A){var e;t:{try{yb(c.nodeName);e=!0;break t}catch(g){}e=!1}e||(c=null)}}else"mouseover"==d?c=b.fromElement:"mouseout"==d&&(c=b.toElement);a.relatedTarget=c;a.offsetX=B||void 0!==b.offsetX?b.offsetX:b.layerX;a.offsetY=B||void 0!==b.offsetY?b.offsetY:b.layerY;a.clientX=void 0!==b.clientX?b.clientX:b.pageX;a.clientY=void 0!==b.clientY?b.clientY:b.pageY;a.screenX=b.screenX||
+0;a.screenY=b.screenY||0;a.button=b.button;a.keyCode=b.keyCode||0;a.charCode=b.charCode||("keypress"==d?b.keyCode:0);a.ctrlKey=b.ctrlKey;a.altKey=b.altKey;a.shiftKey=b.shiftKey;a.metaKey=b.metaKey;a.ab=C?b.metaKey:b.ctrlKey;a.state=b.state;a.R=b;b.defaultPrevented&&a.preventDefault();delete a.T},Fb=function(a){return zb?0==a.R.button:"click"==a.type?!0:!!(a.R.button&Eb[0])};
+G.prototype.stopPropagation=function(){G.f.stopPropagation.call(this);this.R.stopPropagation?this.R.stopPropagation():this.R.cancelBubble=!0};G.prototype.preventDefault=function(){G.f.preventDefault.call(this);var a=this.R;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,Bb)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var Gb="closure_listenable_"+(1E6*Math.random()|0),Hb=0;var Ib=function(a,b,c,d,e,g){this.H=a;this.xb=b;this.src=c;this.type=d;this.capture=!!e;this.Y=g;this.key=++Hb;this.N=this.ja=!1},Jb=function(a){a.N=!0;a.H=null;a.xb=null;a.src=null;a.Y=null};var Kb={},H={},Lb={},Mb={},I=function(a,b,c,d,e){if(da(b)){for(var g=0;g<b.length;g++)I(a,b[g],c,d,e);return null}c=Nb(c);if(a&&a[Gb])a=a.d(b,c,d,e);else t:{if(!b)throw Error("Invalid event type");d=!!d;var h=H;b in h||(h[b]={da:0});h=h[b];d in h||(h[d]={da:0},h.da++);var h=h[d],g=s(a),k;if(h[g]){k=h[g];for(var m=0;m<k.length;m++)if(h=k[m],h.H==c&&h.Y==e){if(h.N)break;k[m].ja=!1;a=k[m];break t}}else k=h[g]=[],h.da++;m=Ob();h=new Ib(c,m,a,b,d,e);h.ja=!1;m.src=a;m.H=h;k.push(h);Lb[g]||(Lb[g]=[]);Lb[g].push(h);
+a.addEventListener?a.addEventListener(b,m,d):a.attachEvent(b in Mb?Mb[b]:Mb[b]="on"+b,m);a=Kb[h.key]=h}return a},Ob=function(){var a=Pb,b=Ab?function(c){return a.call(b.src,b.H,c)}:function(c){c=a.call(b.src,b.H,c);if(!c)return c};return b},Qb=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)Qb(a,b[g],c,d,e);else if(c=Nb(c),a&&a[Gb])a.v(b,c,d,e);else if(d=!!d,a=Rb(a,b,d))for(g=0;g<a.length;g++)if(a[g].H==c&&a[g].capture==d&&a[g].Y==e){J(a[g]);break}},J=function(a){if("number"==typeof a||!a||
+a.N)return!1;var b=a.src;if(b&&b[Gb])return Sb(b.ea,a);var c=a.type,d=a.xb,e=a.capture;b.removeEventListener?b.removeEventListener(c,d,e):b.detachEvent&&b.detachEvent(c in Mb?Mb[c]:Mb[c]="on"+c,d);b=s(b);Lb[b]&&(d=Lb[b],y(d,a),0==d.length&&delete Lb[b]);Jb(a);if(d=H[c][e][b])y(d,a),0==d.length&&(delete H[c][e][b],H[c][e].da--),0==H[c][e].da&&(delete H[c][e],H[c].da--),0==H[c].da&&delete H[c];delete Kb[a.key];return!0},Rb=function(a,b,c){var d=H;return b in d&&(d=d[b],c in d&&(d=d[c],a=s(a),d[a]))?
+d[a]:null},Ub=function(a,b,c){var d=1;b=s(b);if(a[b])for(a=Aa(a[b]),b=0;b<a.length;b++){var e=a[b];e&&!e.N&&(d&=!1!==Tb(e,c))}return Boolean(d)},Tb=function(a,b){var c=a.H,d=a.Y||a.src;a.ja&&J(a);return c.call(d,b)},Pb=function(a,b){if(a.N)return!0;var c=a.type,d=H;if(!(c in d))return!0;var d=d[c],e,g;if(!Ab){if(!(c=b))t:{for(var c=["window","event"],h=l;e=c.shift();)if(null!=h[e])h=h[e];else{c=null;break t}c=h}e=c;c=!0 in d;h=!1 in d;if(c){if(0>e.keyCode||void 0!=e.returnValue)return!0;t:{var k=
+!1;if(0==e.keyCode)try{e.keyCode=-1;break t}catch(m){k=!0}if(k||void 0==e.returnValue)e.returnValue=!0}}k=new G(e,this);e=!0;try{if(c){for(var v=[],r=k.currentTarget;r;r=r.parentNode)v.push(r);g=d[!0];for(var q=v.length-1;!k.T&&0<=q;q--)k.currentTarget=v[q],e&=Ub(g,v[q],k);if(h)for(g=d[!1],q=0;!k.T&&q<v.length;q++)k.currentTarget=v[q],e&=Ub(g,v[q],k)}else e=Tb(a,k)}finally{v&&(v.length=0)}return e}return Tb(a,new G(b,this))},Vb="__closure_events_fn_"+(1E9*Math.random()>>>0),Nb=function(a){u(a,"Listener can not be null.");
+if(p(a))return a;u(a.handleEvent,"An object listener must have handleEvent method.");return a[Vb]||(a[Vb]=function(b){return a.handleEvent(b)})};var K=function(a){this.yb=a;this.Ia={}};t(K,Cb);var Wb=[];K.prototype.d=function(a,b,c,d,e){da(b)||(Wb[0]=b,b=Wb);for(var g=0;g<b.length;g++){var h=I(a,b[g],c||this,d||!1,e||this.yb||this);if(!h)break;this.Ia[h.key]=h}return this};
+K.prototype.v=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)this.v(a,b[g],c,d,e);else{t:if(e=e||this.yb||this,d=!!d,c=Nb(c||this),a&&a[Gb])a=a.Za(b,c,d,e);else{if(a=Rb(a,b,d))for(b=0;b<a.length;b++)if(!a[b].N&&a[b].H==c&&a[b].capture==d&&a[b].Y==e){a=a[b];break t}a=null}a&&(J(a),delete this.Ia[a.key])}return this};K.prototype.Ya=function(){Da(this.Ia,J);this.Ia={}};K.prototype.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var Xb=function(a){this.src=a;this.q={}};Xb.prototype.add=function(a,b,c,d,e){var g=this.q[a];g||(g=this.q[a]=[]);var h=Yb(g,b,d,e);-1<h?(a=g[h],c||(a.ja=!1)):(a=new Ib(b,null,this.src,a,!!d,e),a.ja=c,g.push(a));return a};Xb.prototype.remove=function(a,b,c,d){if(!(a in this.q))return!1;var e=this.q[a];b=Yb(e,b,c,d);return-1<b?(Jb(e[b]),u(null!=e.length),w.splice.call(e,b,1),0==e.length&&delete this.q[a],!0):!1};
+var Sb=function(a,b){var c=b.type;if(!(c in a.q))return!1;var d=y(a.q[c],b);d&&(Jb(b),0==a.q[c].length&&delete a.q[c]);return d};Xb.prototype.Ya=function(a){var b=0,c;for(c in this.q)if(!a||c==a){for(var d=this.q[c],e=0;e<d.length;e++)++b,Jb(d[e]);delete this.q[c]}return b};Xb.prototype.Za=function(a,b,c,d){a=this.q[a];var e=-1;a&&(e=Yb(a,b,c,d));return-1<e?a[e]:null};var Yb=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(!g.N&&g.H==b&&g.capture==!!c&&g.Y==d)return e}return-1};var L=function(){this.ea=new Xb(this);this.bc=this};t(L,Cb);L.prototype[Gb]=!0;f=L.prototype;f.lb=null;f.Wa=function(a){this.lb=a};f.addEventListener=function(a,b,c,d){I(this,a,b,c,d)};f.removeEventListener=function(a,b,c,d){Qb(this,a,b,c,d)};
+f.dispatchEvent=function(a){Zb(this);var b,c=this.lb;if(c){b=[];for(var d=1;c;c=c.lb)b.push(c),u(1E3>++d,"infinite loop")}c=this.bc;d=a.type||a;if(n(a))a=new F(a,c);else if(a instanceof F)a.target=a.target||c;else{var e=a;a=new F(d,c);Ia(a,e)}var e=!0,g;if(b)for(var h=b.length-1;!a.T&&0<=h;h--)g=a.currentTarget=b[h],e=$b(g,d,!0,a)&&e;a.T||(g=a.currentTarget=c,e=$b(g,d,!0,a)&&e,a.T||(e=$b(g,d,!1,a)&&e));if(b)for(h=0;!a.T&&h<b.length;h++)g=a.currentTarget=b[h],e=$b(g,d,!1,a)&&e;return e};
+f.d=function(a,b,c,d){Zb(this);return this.ea.add(a,b,!1,c,d)};f.v=function(a,b,c,d){return this.ea.remove(a,b,c,d)};var $b=function(a,b,c,d){b=a.ea.q[b];if(!b)return!0;b=Aa(b);for(var e=!0,g=0;g<b.length;++g){var h=b[g];if(h&&!h.N&&h.capture==c){var k=h.H,m=h.Y||h.src;h.ja&&Sb(a.ea,h);e=!1!==k.call(m,d)&&e}}return e&&!1!=d.wb};L.prototype.Za=function(a,b,c,d){return this.ea.Za(a,b,c,d)};var Zb=function(a){u(a.ea,"Event target is not initialized. Did you call the superclass (goog.events.EventTarget) constructor?")};var M=function(a,b){a.style.display=b?"":"none"},ac=A?"MozUserSelect":B?"WebkitUserSelect":null,bc=function(a,b,c){c=c?null:a.getElementsByTagName("*");if(ac){if(b=b?"none":"",a.style[ac]=b,c){a=0;for(var d;d=c[a];a++)d.style[ac]=b}}else if(z||Qa)if(b=b?"on":"",a.setAttribute("unselectable",b),c)for(a=0;d=c[a];a++)d.setAttribute("unselectable",b)};var cc=function(){};ba(cc);cc.prototype.ec=0;cc.ca();var N=function(a){L.call(this);this.n=a||kb();this.va=dc};t(N,L);N.prototype.dc=cc.ca();var dc=null,ec=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close"}throw Error("Invalid component state");};f=N.prototype;f.Q=null;f.e=!1;f.c=null;f.va=null;f.o=null;f.s=null;f.i=null;
+var fc=function(a){return a.Q||(a.Q=":"+(a.dc.ec++).toString(36))},gc=function(a,b){if(a.o&&a.o.i){var c=a.o.i,d=a.Q;d in c&&delete c[d];Fa(a.o.i,b,a)}a.Q=b};N.prototype.a=function(){return this.c};var hc=function(a){return a.hb||(a.hb=new K(a))},ic=function(a,b){if(a==b)throw Error("Unable to set parent component");if(b&&a.o&&a.Q&&a.o.i&&a.Q&&(a.Q in a.o.i&&a.o.i[a.Q])&&a.o!=b)throw Error("Unable to set parent component");a.o=b;N.f.Wa.call(a,b)};f=N.prototype;f.getParent=function(){return this.o};
+f.Wa=function(a){if(this.o&&this.o!=a)throw Error("Method not supported");N.f.Wa.call(this,a)};f.jb=function(){return this.n};f.r=function(){this.c=this.n.createElement("div")};f.L=function(a){if(this.e)throw Error("Component already rendered");if(a&&this.ba(a)){var b=jb(a);this.n&&this.n.F==b||(this.n=kb(a));this.Va(a);this.G()}else throw Error("Invalid element to decorate");};f.ba=function(){return!0};f.Va=function(a){this.c=a};f.G=function(){this.e=!0;jc(this,function(a){!a.e&&a.a()&&a.G()})};
+f.ga=function(){jc(this,function(a){a.e&&a.ga()});this.hb&&this.hb.Ya();this.e=!1};f.Da=function(a,b){this.Ra(a,kc(this),b)};
+f.Ra=function(a,b,c){if(a.e&&(c||!this.e))throw Error("Component already rendered");if(0>b||b>kc(this))throw Error("Child component index out of bounds");this.i&&this.s||(this.i={},this.s=[]);if(a.getParent()==this){var d=fc(a);this.i[d]=a;y(this.s,a)}else Fa(this.i,fc(a),a);ic(a,this);Ca(this.s,b,0,a);if(a.e&&this.e&&a.getParent()==this)c=this.C(),c.insertBefore(a.a(),c.childNodes[b]||null);else if(c){this.c||this.r();c=O(this,b+1);b=this.C();c=c?c.c:null;if(a.e)throw Error("Component already rendered");
+a.c||a.r();b?b.insertBefore(a.c,c||null):a.n.F.body.appendChild(a.c);a.o&&!a.o.e||a.G()}else this.e&&(!a.e&&a.c&&a.c.parentNode&&1==a.c.parentNode.nodeType)&&a.G()};f.C=function(){return this.c};
+var lc=function(a){if(null==a.va){var b;t:{b=a.e?a.c:a.n.F.body;var c=jb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(b=c.defaultView.getComputedStyle(b,null))){b=b.direction||b.getPropertyValue("direction")||"";break t}b=""}a.va="rtl"==(b||((a.e?a.c:a.n.F.body).currentStyle?(a.e?a.c:a.n.F.body).currentStyle.direction:null)||(a.e?a.c:a.n.F.body).style&&(a.e?a.c:a.n.F.body).style.direction)}return a.va};
+N.prototype.ta=function(a){if(this.e)throw Error("Component already rendered");this.va=a};var kc=function(a){return a.s?a.s.length:0},O=function(a,b){return a.s?a.s[b]||null:null},jc=function(a,b,c){a.s&&xa(a.s,b,c)},mc=function(a,b){return a.s&&b?wa(a.s,b):-1};
+N.prototype.removeChild=function(a,b){if(a){var c=n(a)?a:fc(a);a=this.i&&c?(c in this.i?this.i[c]:void 0)||null:null;if(c&&a){var d=this.i;c in d&&delete d[c];y(this.s,a);b&&(a.ga(),a.c&&(c=a.c)&&c.parentNode&&c.parentNode.removeChild(c));ic(a,null)}}if(!a)throw Error("Child is not in parent component");return a};var nc,oc={kc:"activedescendant",pc:"atomic",qc:"autocomplete",sc:"busy",vc:"checked",Ac:"controls",Cc:"describedby",Fc:"disabled",Hc:"dropeffect",Ic:"expanded",Jc:"flowto",Lc:"grabbed",Pc:"haspopup",Rc:"hidden",Tc:"invalid",Uc:"label",Vc:"labelledby",Wc:"level",ad:"live",ld:"multiline",md:"multiselectable",qd:"orientation",rd:"owns",sd:"posinset",ud:"pressed",yd:"readonly",Ad:"relevant",Bd:"required",Hd:"selected",Jd:"setsize",Ld:"sort",Yd:"valuemax",Zd:"valuemin",$d:"valuenow",ae:"valuetext"};var pc={lc:"alert",mc:"alertdialog",nc:"application",oc:"article",rc:"banner",tc:"button",uc:"checkbox",wc:"columnheader",xc:"combobox",yc:"complementary",zc:"contentinfo",Bc:"definition",Dc:"dialog",Ec:"directory",Gc:"document",Kc:"form",Mc:"grid",Nc:"gridcell",Oc:"group",Qc:"heading",Sc:"img",Xc:"link",Yc:"list",Zc:"listbox",$c:"listitem",bd:"log",cd:"main",dd:"marquee",ed:"math",fd:"menu",gd:"menubar",hd:"menuitem",jd:"menuitemcheckbox",kd:"menuitemradio",nd:"navigation",od:"note",pd:"option",
+td:"presentation",vd:"progressbar",wd:"radio",xd:"radiogroup",zd:"region",Cd:"row",Dd:"rowgroup",Ed:"rowheader",Fd:"scrollbar",Gd:"search",Id:"separator",Kd:"slider",Md:"spinbutton",Nd:"status",Od:"tab",Pd:"tablist",Qd:"tabpanel",Rd:"textbox",Sd:"timer",Td:"toolbar",Ud:"tooltip",Vd:"tree",Wd:"treegrid",Xd:"treeitem"};var qc=function(a,b){b?(u(Ea(pc,b),"No such ARIA role "+b),a.setAttribute("role",b)):a.removeAttribute("role")},sc=function(a,b,c){ea(c)&&(c=c.join(" "));var d=rc(b);""===c||void 0==c?(nc||(nc={atomic:!1,autocomplete:"none",dropeffect:"none",haspopup:!1,live:"off",multiline:!1,multiselectable:!1,orientation:"vertical",readonly:!1,relevant:"additions text",required:!1,sort:"none",busy:!1,disabled:!1,hidden:!1,invalid:"false"}),c=nc,b in c?a.setAttribute(d,c[b]):a.removeAttribute(d)):a.setAttribute(d,
+c)},rc=function(a){u(a,"ARIA attribute cannot be empty.");u(Ea(oc,a),"No such ARIA attribute "+a);return"aria-"+a};var uc=function(a,b,c,d,e){if(!(z||B&&D("525")))return!0;if(C&&e)return tc(a);if(e&&!d||!c&&(17==b||18==b||C&&91==b))return!1;if(B&&d&&c)switch(a){case 220:case 219:case 221:case 192:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:return!1}if(z&&d&&b==a)return!1;switch(a){case 13:return!(z&&z&&9<=bb);case 27:return!B}return tc(a)},tc=function(a){if(48<=a&&57>=a||96<=a&&106>=a||65<=a&&90>=a||B&&0==a)return!0;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 59:case 189:case 187:case 61:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return!0;
+default:return!1}},vc=function(a){switch(a){case 61:return 187;case 59:return 186;case 224:return 91;case 0:return 224;default:return a}};var P=function(a,b){L.call(this);a&&wc(this,a,b)};t(P,L);f=P.prototype;f.c=null;f.Ea=null;f.Ua=null;f.Fa=null;f.t=-1;f.P=-1;f.ib=!1;
+var xc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},yc={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},zc=z||B&&D("525"),Ac=C&&A;
+P.prototype.Qb=function(a){B&&(17==this.t&&!a.ctrlKey||18==this.t&&!a.altKey||C&&91==this.t&&!a.metaKey)&&(this.P=this.t=-1);-1==this.t&&(a.ctrlKey&&17!=a.keyCode?this.t=17:a.altKey&&18!=a.keyCode?this.t=18:a.metaKey&&91!=a.keyCode&&(this.t=91));zc&&!uc(a.keyCode,this.t,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.P=A?vc(a.keyCode):a.keyCode,Ac&&(this.ib=a.altKey))};P.prototype.Rb=function(a){this.P=this.t=-1;this.ib=a.altKey};
+P.prototype.handleEvent=function(a){var b=a.R,c,d,e=b.altKey;z&&"keypress"==a.type?(c=this.P,d=13!=c&&27!=c?b.keyCode:0):B&&"keypress"==a.type?(c=this.P,d=0<=b.charCode&&63232>b.charCode&&tc(c)?b.charCode:0):Qa?(c=this.P,d=tc(c)?b.keyCode:0):(c=b.keyCode||this.P,d=b.charCode||0,Ac&&(e=this.ib),C&&(63==d&&224==c)&&(c=191));var g=c,h=b.keyIdentifier;c?63232<=c&&c in xc?g=xc[c]:25==c&&a.shiftKey&&(g=9):h&&h in yc&&(g=yc[h]);a=g==this.t;this.t=g;b=new Bc(g,d,a,b);b.altKey=e;this.dispatchEvent(b)};
+P.prototype.a=function(){return this.c};var wc=function(a,b,c){a.Fa&&a.detach();a.c=b;a.Ea=I(a.c,"keypress",a,c);a.Ua=I(a.c,"keydown",a.Qb,c,a);a.Fa=I(a.c,"keyup",a.Rb,c,a)};P.prototype.detach=function(){this.Ea&&(J(this.Ea),J(this.Ua),J(this.Fa),this.Fa=this.Ua=this.Ea=null);this.c=null;this.P=this.t=-1};var Bc=function(a,b,c,d){d&&Db(this,d,void 0);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};t(Bc,G);var Dc=function(a,b){if(!a)throw Error("Invalid class name "+a);if(!p(b))throw Error("Invalid decorator function "+b);Cc[a]=b},Ec={},Cc={};var Q=function(){};ba(Q);Q.prototype.X=function(){};var Fc=function(a,b){a&&(a.tabIndex=b?0:-1)};f=Q.prototype;f.r=function(a){return a.jb().r("div",this.wa(a).join(" "))};f.C=function(a){return a};f.ba=function(a){return"DIV"==a.tagName};f.L=function(a,b){b.id&&gc(a,b.id);var c=this.A(),d=!1,e=fb(b);e&&xa(e,function(b){b==c?d=!0:b&&this.$a(a,b,c)},this);d||E(b,c);Gc(a,this.C(b));return b};
+f.$a=function(a,b,c){b==c+"-disabled"?a.sa(!1):b==c+"-horizontal"?Hc(a,"horizontal"):b==c+"-vertical"&&Hc(a,"vertical")};var Gc=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;t:{e=void 0;for(var g=fb(c),h=0,k=g.length;h<k;h++)if(e=g[h]in Cc?Cc[g[h]]():null)break t;e=null}e&&(e.c=c,a.isEnabled()||e.sa(!1),a.Da(e),e.L(c))}else c.nodeValue&&""!=ma(c.nodeValue)||b.removeChild(c);c=d}};
+Q.prototype.Ka=function(a){a=a.a();u(a,"The container DOM element cannot be null.");bc(a,!0,A);z&&(a.hideFocus=!0);var b=this.X();b&&qc(a,b)};Q.prototype.k=function(a){return a.a()};Q.prototype.A=function(){return"goog-container"};Q.prototype.wa=function(a){var b=this.A(),c=[b,"horizontal"==a.M?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var R=function(){},Ic;ba(R);f=R.prototype;f.X=function(){};f.r=function(a){var b=a.jb().r("div",this.wa(a).join(" "),a.Ca);Jc(a,b);return b};f.C=function(a){return a};f.ua=function(a,b,c){if(a=a.a?a.a():a)if(z&&!D("7")){var d=Kc(fb(a),b);d.push(b);ia(c?E:hb,a).apply(null,d)}else c?E(a,b):hb(a,b)};f.ba=function(){return!0};
+f.L=function(a,b){b.id&&gc(a,b.id);var c=this.C(b);c&&c.firstChild?Lc(a,c.firstChild.nextSibling?Aa(c.childNodes):c.firstChild):a.Ca=null;var d=0,e=this.A(),g=this.A(),h=!1,k=!1,c=!1,m=fb(b);xa(m,function(a){if(h||a!=e)if(k||a!=g){var b=d;this.sb||(this.Ga||Mc(this),this.sb=Ga(this.Ga));a=parseInt(this.sb[a],10);d=b|(isNaN(a)?0:a)}else k=!0;else h=!0,g==e&&(k=!0)},this);a.g=d;h||(m.push(e),g==e&&(k=!0));k||m.push(g);var v=a.I;v&&m.push.apply(m,v);if(z&&!D("7")){var r=Kc(m);0<r.length&&(m.push.apply(m,
+r),c=!0)}if(!h||!k||v||c)b.className=m.join(" ");Jc(a,b);return b};f.Ka=function(a){lc(a)&&this.ta(a.a(),!0);a.isEnabled()&&this.qa(a,a.u())};var Nc=function(a,b,c){if(a=c||a.X())u(b,"The element passed as a first parameter cannot be null."),qc(b,a)},Jc=function(a,b){u(a);u(b);a.u()||sc(b,"hidden",!a.u());a.isEnabled()||Oc(b,1,!a.isEnabled());a.m&8&&Oc(b,8,!!(a.g&8));a.m&16&&Oc(b,16,!!(a.g&16));a.m&64&&Oc(b,64,!!(a.g&64))};f=R.prototype;f.Aa=function(a,b){bc(a,!b,!z&&!Qa)};
+f.ta=function(a,b){this.ua(a,this.A()+"-rtl",b)};f.U=function(a){var b;return a.m&32&&(b=a.k())?wb(b):!1};f.qa=function(a,b){var c;if(a.m&32&&(c=a.k())){if(!b&&a.g&32){try{c.blur()}catch(d){}a.g&32&&a.oa(null)}wb(c)!=b&&(b?c.tabIndex=0:(c.tabIndex=-1,c.removeAttribute("tabIndex")))}};f.ma=function(a,b){M(a,b);a&&sc(a,"hidden",!b)};f.w=function(a,b,c){var d=a.a();if(d){var e=Pc(this,b);e&&this.ua(a,e,c);Oc(d,b,c)}};
+var Oc=function(a,b,c){Ic||(Ic={1:"disabled",8:"selected",16:"checked",64:"expanded"});if(b=Ic[b])u(a,"The element passed as a first parameter cannot be null."),sc(a,b,c)};R.prototype.k=function(a){return a.a()};R.prototype.A=function(){return"goog-control"};R.prototype.wa=function(a){var b=this.A(),c=[b],d=this.A();d!=b&&c.push(d);b=a.g;for(d=[];b;){var e=b&-b;d.push(Pc(this,e));b&=~e}c.push.apply(c,d);(a=a.I)&&c.push.apply(c,a);z&&!D("7")&&c.push.apply(c,Kc(c));return c};
+var Kc=function(a,b){var c=[];b&&(a=a.concat([b]));xa([],function(d){!za(d,ia(x,a))||b&&!x(d,b)||c.push(d.join("_"))});return c},Pc=function(a,b){a.Ga||Mc(a);return a.Ga[b]},Mc=function(a){var b=a.A();a.Ga={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+"-open"}};var S=function(a,b,c){N.call(this,c);if(!b){b=this.constructor;for(var d;b;){d=s(b);if(d=Ec[d])break;b=b.f?b.f.constructor:null}b=d?p(d.ca)?d.ca():new d:null}this.b=b;this.Ca=a};t(S,N);f=S.prototype;f.Ca=null;f.g=0;f.m=39;f.cc=255;f.Z=0;f.p=!0;f.I=null;f.fa=!0;f.ya=!1;f.pb=null;f.ob=function(){return this.fa};f.La=function(a){this.e&&a!=this.fa&&Qc(this,a);this.fa=a};f.k=function(){return this.b.k(this)};f.za=function(){return this.ka||(this.ka=new P)};f.Ab=function(){return this.b};
+f.ua=function(a,b){b?a&&(this.I?x(this.I,a)||this.I.push(a):this.I=[a],this.b.ua(this,a,!0)):a&&(this.I&&y(this.I,a))&&(0==this.I.length&&(this.I=null),this.b.ua(this,a,!1))};f.r=function(){var a=this.b.r(this);this.c=a;Nc(this.b,a,this.pb);this.ya||this.b.Aa(a,!1);this.u()||this.b.ma(a,!1)};f.C=function(){return this.b.C(this.a())};f.ba=function(a){return this.b.ba(a)};f.Va=function(a){this.c=a=this.b.L(this,a);Nc(this.b,a,this.pb);this.ya||this.b.Aa(a,!1);this.p="none"!=a.style.display};
+f.G=function(){S.f.G.call(this);this.b.Ka(this);if(this.m&-2&&(this.ob()&&Qc(this,!0),this.m&32)){var a=this.k();if(a){var b=this.za();wc(b,a);hc(this).d(b,"key",this.K).d(a,"focus",this.pa).d(a,"blur",this.oa)}}};
+var Qc=function(a,b){var c=hc(a),d=a.a();b?(c.d(d,"mouseover",a.Oa).d(d,"mousedown",a.na).d(d,"mouseup",a.Pa).d(d,"mouseout",a.Na),a.ra!=aa&&c.d(d,"contextmenu",a.ra),z&&c.d(d,"dblclick",a.rb)):(c.v(d,"mouseover",a.Oa).v(d,"mousedown",a.na).v(d,"mouseup",a.Pa).v(d,"mouseout",a.Na),a.ra!=aa&&c.v(d,"contextmenu",a.ra),z&&c.v(d,"dblclick",a.rb))};S.prototype.ga=function(){S.f.ga.call(this);this.ka&&this.ka.detach();this.u()&&this.isEnabled()&&this.b.qa(this,!1)};var Lc=function(a,b){a.Ca=b};f=S.prototype;
+f.ta=function(a){S.f.ta.call(this,a);var b=this.a();b&&this.b.ta(b,a)};f.Aa=function(a){this.ya=a;var b=this.a();b&&this.b.Aa(b,a)};f.u=function(){return this.p};f.ma=function(a,b){if(b||this.p!=a&&this.dispatchEvent(a?"show":"hide")){var c=this.a();c&&this.b.ma(c,a);this.isEnabled()&&this.b.qa(this,a);this.p=a;return!0}return!1};f.isEnabled=function(){return!(this.g&1)};
+f.sa=function(a){var b=this.getParent();b&&"function"==typeof b.isEnabled&&!b.isEnabled()||!T(this,1,!a)||(a||(this.setActive(!1),this.D(!1)),this.u()&&this.b.qa(this,a),this.w(1,!a))};f.D=function(a){T(this,2,a)&&this.w(2,a)};f.setActive=function(a){T(this,4,a)&&this.w(4,a)};var Rc=function(a,b){T(a,8,b)&&a.w(8,b)},Sc=function(a,b){T(a,64,b)&&a.w(64,b)};S.prototype.w=function(a,b){this.m&a&&b!=!!(this.g&a)&&(this.b.w(this,a,b),this.g=b?this.g|a:this.g&~a)};
+var Tc=function(a,b,c){if(a.e&&a.g&b&&!c)throw Error("Component already rendered");!c&&a.g&b&&a.w(b,!1);a.m=c?a.m|b:a.m&~b},U=function(a,b){return!!(a.cc&b)&&!!(a.m&b)},T=function(a,b,c){return!!(a.m&b)&&!!(a.g&b)!=c&&(!(a.Z&b)||a.dispatchEvent(ec(b,c)))&&!a.Sb};f=S.prototype;f.Oa=function(a){(!a.relatedTarget||!sb(this.a(),a.relatedTarget))&&(this.dispatchEvent("enter")&&this.isEnabled()&&U(this,2))&&this.D(!0)};
+f.Na=function(a){a.relatedTarget&&sb(this.a(),a.relatedTarget)||!this.dispatchEvent("leave")||(U(this,4)&&this.setActive(!1),U(this,2)&&this.D(!1))};f.ra=aa;f.na=function(a){this.isEnabled()&&(U(this,2)&&this.D(!0),!Fb(a)||B&&C&&a.ctrlKey||(U(this,4)&&this.setActive(!0),this.b.U(this)&&this.k().focus()));this.ya||(!Fb(a)||B&&C&&a.ctrlKey)||a.preventDefault()};f.Pa=function(a){this.isEnabled()&&(U(this,2)&&this.D(!0),this.g&4&&(Uc(this,a)&&U(this,4))&&this.setActive(!1))};
+f.rb=function(a){this.isEnabled()&&Uc(this,a)};var Uc=function(a,b){if(U(a,16)){var c=!(a.g&16);T(a,16,c)&&a.w(16,c)}U(a,8)&&Rc(a,!0);U(a,64)&&Sc(a,!(a.g&64));c=new F("action",a);b&&(c.altKey=b.altKey,c.ctrlKey=b.ctrlKey,c.metaKey=b.metaKey,c.shiftKey=b.shiftKey,c.ab=b.ab);return a.dispatchEvent(c)};S.prototype.pa=function(){U(this,32)&&T(this,32,!0)&&this.w(32,!0)};S.prototype.oa=function(){U(this,4)&&this.setActive(!1);U(this,32)&&T(this,32,!1)&&this.w(32,!1)};
+S.prototype.K=function(a){return this.u()&&this.isEnabled()&&this.kb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};S.prototype.kb=function(a){return 13==a.keyCode&&Uc(this,a)};if(!p(S))throw Error("Invalid component class "+S);if(!p(R))throw Error("Invalid renderer class "+R);var Vc=s(S);Ec[Vc]=R;Dc("goog-control",function(){return new S(null)});var V=function(a,b,c){N.call(this,c);this.b=b||Q.ca();this.M=a||"vertical"};t(V,N);f=V.prototype;f.tb=null;f.ka=null;f.b=null;f.M=null;f.p=!0;f.$=!0;f.Xa=!0;f.j=-1;f.h=null;f.ha=!1;f.Pb=!1;f.Ob=!0;f.O=null;f.k=function(){return this.tb||this.b.k(this)};f.za=function(){return this.ka||(this.ka=new P(this.k()))};f.Ab=function(){return this.b};f.r=function(){this.c=this.b.r(this)};f.C=function(){return this.b.C(this.a())};f.ba=function(a){return this.b.ba(a)};
+f.Va=function(a){this.c=this.b.L(this,a);"none"==a.style.display&&(this.p=!1)};f.G=function(){V.f.G.call(this);jc(this,function(a){a.e&&Wc(this,a)},this);var a=this.a();this.b.Ka(this);this.ma(this.p,!0);hc(this).d(this,"enter",this.Ib).d(this,"highlight",this.Jb).d(this,"unhighlight",this.Lb).d(this,"open",this.Kb).d(this,"close",this.Gb).d(a,"mousedown",this.na).d(jb(a),"mouseup",this.Hb).d(a,["mousedown","mouseup","mouseover","mouseout","contextmenu"],this.Fb);this.U()&&Xc(this,!0)};
+var Xc=function(a,b){var c=hc(a),d=a.k();b?c.d(d,"focus",a.pa).d(d,"blur",a.oa).d(a.za(),"key",a.K):c.v(d,"focus",a.pa).v(d,"blur",a.oa).v(a.za(),"key",a.K)};f=V.prototype;f.ga=function(){Yc(this,-1);this.h&&Sc(this.h,!1);this.ha=!1;V.f.ga.call(this)};f.Ib=function(){return!0};
+f.Jb=function(a){var b=mc(this,a.target);if(-1<b&&b!=this.j){var c=O(this,this.j);c&&c.D(!1);this.j=b;c=O(this,this.j);this.ha&&c.setActive(!0);this.Ob&&(this.h&&c!=this.h)&&(c.m&64?Sc(c,!0):Sc(this.h,!1))}b=this.a();u(b,"The DOM element for the container cannot be null.");null!=a.target.a()&&sc(b,"activedescendant",a.target.a().id)};f.Lb=function(a){a.target==O(this,this.j)&&(this.j=-1);a=this.a();u(a,"The DOM element for the container cannot be null.");a.removeAttribute(rc("activedescendant"))};
+f.Kb=function(a){(a=a.target)&&(a!=this.h&&a.getParent()==this)&&(this.h&&Sc(this.h,!1),this.h=a)};f.Gb=function(a){a.target==this.h&&(this.h=null)};f.na=function(a){this.$&&(this.ha=!0);var b=this.k();b&&wb(b)?b.focus():a.preventDefault()};f.Hb=function(){this.ha=!1};
+f.Fb=function(a){var b;t:{b=a.target;if(this.O)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.O){b=this.O[d];break t}b=b.parentNode}b=null}if(b)switch(a.type){case "mousedown":b.na(a);break;case "mouseup":b.Pa(a);break;case "mouseover":b.Oa(a);break;case "mouseout":b.Na(a);break;case "contextmenu":b.ra(a)}};f.pa=function(){};f.oa=function(){Yc(this,-1);this.ha=!1;this.h&&Sc(this.h,!1)};
+f.K=function(a){return this.isEnabled()&&this.u()&&(0!=kc(this)||this.tb)&&this.kb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};
+f.kb=function(a){var b=O(this,this.j);if(b&&"function"==typeof b.K&&b.K(a)||this.h&&this.h!=b&&"function"==typeof this.h.K&&this.h.K(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.U())this.k().blur();else return!1;break;case 36:Zc(this);break;case 35:$c(this);break;case 38:if("vertical"==this.M)ad(this);else return!1;break;case 37:if("horizontal"==this.M)lc(this)?bd(this):ad(this);else return!1;break;case 40:if("vertical"==this.M)bd(this);else return!1;
+break;case 39:if("horizontal"==this.M)lc(this)?ad(this):bd(this);else return!1;break;default:return!1}return!0};var Wc=function(a,b){var c=b.a(),c=c.id||(c.id=fc(b));a.O||(a.O={});a.O[c]=b};V.prototype.Da=function(a,b){va(a,S,"The child of a container must be a control");V.f.Da.call(this,a,b)};V.prototype.Ra=function(a,b,c){a.Z|=2;a.Z|=64;!this.U()&&this.Pb||Tc(a,32,!1);a.La(!1);V.f.Ra.call(this,a,b,c);a.e&&this.e&&Wc(this,a);b<=this.j&&this.j++};
+V.prototype.removeChild=function(a,b){if(a=n(a)?this.i&&a?(a in this.i?this.i[a]:void 0)||null:null:a){var c=mc(this,a);-1!=c&&(c==this.j?a.D(!1):c<this.j&&this.j--);var d=a.a();d&&(d.id&&this.O)&&(c=this.O,d=d.id,d in c&&delete c[d])}a=V.f.removeChild.call(this,a,b);a.La(!0);return a};var Hc=function(a,b){if(a.a())throw Error("Component already rendered");a.M=b};f=V.prototype;f.u=function(){return this.p};
+f.ma=function(a,b){if(b||this.p!=a&&this.dispatchEvent(a?"show":"hide")){this.p=a;var c=this.a();c&&(M(c,a),this.U()&&Fc(this.k(),this.$&&this.p),b||this.dispatchEvent(this.p?"aftershow":"afterhide"));return!0}return!1};f.isEnabled=function(){return this.$};f.sa=function(a){this.$!=a&&this.dispatchEvent(a?"enable":"disable")&&(a?(this.$=!0,jc(this,function(a){a.ub?delete a.ub:a.sa(!0)})):(jc(this,function(a){a.isEnabled()?a.sa(!1):a.ub=!0}),this.ha=this.$=!1),this.U()&&Fc(this.k(),a&&this.p))};
+f.U=function(){return this.Xa};f.qa=function(a){a!=this.Xa&&this.e&&Xc(this,a);this.Xa=a;this.$&&this.p&&Fc(this.k(),a)};var Yc=function(a,b){var c=O(a,b);c?c.D(!0):-1<a.j&&O(a,a.j).D(!1)};V.prototype.D=function(a){Yc(this,mc(this,a))};
+var Zc=function(a){cd(a,function(a,c){return(a+1)%c},kc(a)-1)},$c=function(a){cd(a,function(a,c){a--;return 0>a?c-1:a},0)},bd=function(a){cd(a,function(a,c){return(a+1)%c},a.j)},ad=function(a){cd(a,function(a,c){a--;return 0>a?c-1:a},a.j)},cd=function(a,b,c){c=0>c?mc(a,a.h):c;var d=kc(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=O(a,c);if(g&&g.u()&&g.isEnabled()&&g.m&2){a.Sa(c);break}e++;c=b.call(a,c,d)}};V.prototype.Sa=function(a){Yc(this,a)};var dd=function(){};t(dd,R);ba(dd);f=dd.prototype;f.A=function(){return"goog-tab"};f.X=function(){return"tab"};f.r=function(a){var b=dd.f.r.call(this,a);(a=a.Qa())&&this.Ta(b,a);return b};f.L=function(a,b){b=dd.f.L.call(this,a,b);var c=this.Qa(b);c&&(a.qb=c);a.g&8&&(c=a.getParent())&&p(c.aa)&&(a.w(8,!1),c.aa(a));return b};f.Qa=function(a){return a.title||""};f.Ta=function(a,b){a&&(a.title=b||"")};var ed=function(a,b,c){S.call(this,a,b||dd.ca(),c);Tc(this,8,!0);this.Z|=9};t(ed,S);ed.prototype.Qa=function(){return this.qb};ed.prototype.Ta=function(a){this.Ab().Ta(this.a(),a);this.qb=a};Dc("goog-tab",function(){return new ed(null)});var W=function(){};t(W,Q);ba(W);W.prototype.A=function(){return"goog-tab-bar"};W.prototype.X=function(){return"tablist"};W.prototype.$a=function(a,b,c){this.Bb||(this.Ha||fd(this),this.Bb=Ga(this.Ha));var d=this.Bb[b];d?(Hc(a,gd(d)),a.vb=d):W.f.$a.call(this,a,b,c)};W.prototype.wa=function(a){var b=W.f.wa.call(this,a);this.Ha||fd(this);b.push(this.Ha[a.vb]);return b};var fd=function(a){var b=a.A();a.Ha={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var X=function(a,b,c){a=a||"top";Hc(this,gd(a));this.vb=a;V.call(this,this.M,b||W.ca(),c);hd(this)};t(X,V);f=X.prototype;f.Zb=!0;f.J=null;f.G=function(){X.f.G.call(this);hd(this)};f.removeChild=function(a,b){id(this,a);return X.f.removeChild.call(this,a,b)};f.Sa=function(a){X.f.Sa.call(this,a);this.Zb&&this.aa(O(this,a))};f.aa=function(a){a?Rc(a,!0):this.J&&Rc(this.J,!1)};
+var id=function(a,b){if(b&&b==a.J){for(var c=mc(a,b),d=c-1;b=O(a,d);d--)if(b.u()&&b.isEnabled()){a.aa(b);return}for(c+=1;b=O(a,c);c++)if(b.u()&&b.isEnabled()){a.aa(b);return}a.aa(null)}};f=X.prototype;f.Xb=function(a){this.J&&this.J!=a.target&&Rc(this.J,!1);this.J=a.target};f.Yb=function(a){a.target==this.J&&(this.J=null)};f.Vb=function(a){id(this,a.target)};f.Wb=function(a){id(this,a.target)};f.pa=function(){O(this,this.j)||this.D(this.J||O(this,0))};
+var hd=function(a){hc(a).d(a,"select",a.Xb).d(a,"unselect",a.Yb).d(a,"disable",a.Vb).d(a,"hide",a.Wb)},gd=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Dc("goog-tab-bar",function(){return new X});var Y=function(a,b,c,d,e){function g(a){a&&(a.tabIndex=0,qc(a,h.X()),E(a,"goog-zippy-header"),jd(h,a),a&&h.Mb.d(a,"keydown",h.Nb))}L.call(this);this.n=e||kb();this.V=this.n.a(a)||null;this.Ba=this.n.a(d||null);this.ia=(this.Ma=p(b)?b:null)||!b?null:this.n.a(b);this.l=!0==c;this.Mb=new K(this);this.nb=new K(this);var h=this;g(this.V);g(this.Ba);this.W(this.l)};t(Y,L);f=Y.prototype;f.fa=!0;f.X=function(){return"tab"};f.C=function(){return this.ia};f.toggle=function(){this.W(!this.l)};
+f.W=function(a){this.ia?M(this.ia,a):a&&this.Ma&&(this.ia=this.Ma());this.ia&&E(this.ia,"goog-zippy-content");if(this.Ba)M(this.V,!a),M(this.Ba,a);else if(this.V){var b=this.V;a?E(b,"goog-zippy-expanded"):hb(b,"goog-zippy-expanded");b=this.V;a?hb(b,"goog-zippy-collapsed"):E(b,"goog-zippy-collapsed");sc(this.V,"expanded",a)}this.l=a;this.dispatchEvent(new kd("toggle",this))};f.ob=function(){return this.fa};f.La=function(a){this.fa!=a&&((this.fa=a)?(jd(this,this.V),jd(this,this.Ba)):this.nb.Ya())};
+var jd=function(a,b){b&&a.nb.d(b,"click",a.$b)};Y.prototype.Nb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new F("action",this)),a.preventDefault(),a.stopPropagation()};Y.prototype.$b=function(){this.toggle();this.dispatchEvent(new F("action",this))};var kd=function(a,b){F.call(this,a,b)};t(kd,F);var Z=function(a,b){this.mb=[];for(var c=lb(a),c=mb("span","ae-zippy",c),d=0,e;e=c[d];d++){var g;if(void 0!=e.parentNode.parentNode.parentNode.nextElementSibling)g=e.parentNode.parentNode.parentNode.nextElementSibling;else for(g=e.parentNode.parentNode.parentNode.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new Y(e,g,!1);this.mb.push(e)}this.fc=new ld(this.mb,lb(b))};Z.prototype.ic=function(){return this.fc};Z.prototype.jc=function(){return this.mb};
+var ld=function(a,b){this.xa=a;if(this.xa.length)for(var c=0,d;d=this.xa[c];c++)I(d,"toggle",this.Ub,!1,this);this.Ja=0;this.l=!1;c="ae-toggle ae-plus ae-action";this.xa.length||(c+=" ae-disabled");this.S=qb("span",{className:c},"Expand All");I(this.S,"click",this.Tb,!1,this);b&&b.appendChild(this.S)};ld.prototype.Tb=function(){this.xa.length&&this.W(!this.l)};
+ld.prototype.Ub=function(a){a=a.currentTarget;this.Ja=a.l?this.Ja+1:this.Ja-1;a.l!=this.l&&(a.l?(this.l=!0,md(this,!0)):0==this.Ja&&(this.l=!1,md(this,!1)))};ld.prototype.W=function(a){this.l=a;a=0;for(var b;b=this.xa[a];a++)b.l!=this.l&&b.W(this.l);md(this)};
+var md=function(a,b){(void 0!==b?b:a.l)?(hb(a.S,"ae-plus"),E(a.S,"ae-minus"),tb(a.S,"Collapse All")):(hb(a.S,"ae-minus"),E(a.S,"ae-plus"),tb(a.S,"Expand All"))},nd=function(a){this.ac=a;this.Db={};var b,c=qb("div",{},b=qb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),qb("div",{className:"goog-tab-bar-clear"}),a=qb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new X;d.L(b);I(d,"select",this.Cb,!1,this);I(d,"unselect",this.Cb,!1,this);b=
+0;for(var e;e=this.ac[b];b++)if(e=lb("ae-stats-details-"+e)){var g=mb("h2",null,e)[0],h;h=g;var k=void 0;eb&&"innerText"in h?k=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(k=[],xb(h,k,!0),k=k.join(""));k=k.replace(/ \xAD /g," ").replace(/\xAD/g,"");k=k.replace(/\u200B/g,"");eb||(k=k.replace(/ +/g," "));" "!=k&&(k=k.replace(/^\s*/,""));h=k;g&&g.parentNode&&g.parentNode.removeChild(g);g=new ed(h);this.Db[s(g)]=e;d.Da(g,!0);a.appendChild(e);0==b?d.aa(g):M(e,!1)}lb("bd").appendChild(c)};
+nd.prototype.Cb=function(a){var b=this.Db[s(a.target)];M(b,"select"==a.type)};ja("ae.Stats.Details.Tabs",nd);ja("goog.ui.Zippy",Y);Y.prototype.setExpanded=Y.prototype.W;ja("ae.Stats.MakeZippys",Z);Z.prototype.getExpandCollapse=Z.prototype.ic;Z.prototype.getZippys=Z.prototype.jc;ld.prototype.setExpanded=ld.prototype.W;var $=function(){this.bb=[];this.gb=[]},od=[[5,0.2,1],[6,0.2,1.2],[5,0.25,1.25],[6,0.25,1.5],[4,0.5,2],[5,0.5,2.5],[6,0.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],pd=function(a){if(0>=a)return[2,0.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c<od.length;c++)if(a<=od[c][2])return[od[c][0],od[c][1]*b,od[c][2]*b];return[5,2*b,10*b]};$.prototype.fb="stats/static/pix.gif";$.prototype.B="ae-stats-gantt-";$.prototype.eb=0;$.prototype.write=function(a){this.gb.push(a)};
+var qd=function(a,b,c,d){a.write('<tr class="'+a.B+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.B+'axis">');for(var e=0;e<=b;e++)a.write('<img class="'+a.B+'tick" src="'+a.fb+'" alt="" '),a.write('style="left:'+e*c*d+'%"\n>'),a.write('<span class="'+a.B+'scale" style="left:'+e*c*d+'%">'),a.write("&nbsp;"+e*c+"</span>");a.write("</div></td></tr>\n")};
+$.prototype.hc=function(){this.gb=[];var a=pd(this.eb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.B+'table">\n');qd(this,b,c,a);for(var d=0;d<this.bb.length;d++){var e=this.bb[d];this.write('<tr class="'+this.B+'datarow"><td width="20%">');0<e.label.length&&(0<e.la.length&&this.write('<a class="'+this.B+'link" href="'+e.la+'">'),this.write(e.label),0<e.la.length&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.B+'container">');0<e.la.length&&this.write('<a class="'+
+this.B+'link" href="'+e.la+'"\n>');this.write('<img class="'+this.B+'bar" src="'+this.fb+'" alt="" ');this.write('style="left:'+e.start*a+"%;width:"+e.duration*a+'%;min-width:1px"\n>');0<e.cb&&(this.write('<img class="'+this.B+'extra" src="'+this.fb+'" alt="" '),this.write('style="left:'+e.start*a+"%;width:"+e.cb*a+'%"\n>'));0<e.zb.length&&(this.write('<span class="'+this.B+'inline" style="left:'+(e.start+Math.max(e.duration,e.cb))*a+'%">&nbsp;'),this.write(e.zb),this.write("</span>"));0<e.la.length&&
+this.write("</a>");this.write("</div></td></tr>\n")}qd(this,b,c,a);this.write("</table>\n");return this.gb.join("")};$.prototype.gc=function(a,b,c,d,e,g){this.eb=Math.max(this.eb,Math.max(b+c,b+d));this.bb.push({label:a,start:b,duration:c,cb:d,zb:e,la:g})};ja("Gantt",$);$.prototype.add_bar=$.prototype.gc;$.prototype.draw=$.prototype.hc;})();
diff --git a/google/appengine/ext/blobstore/blobstore.py b/google/appengine/ext/blobstore/blobstore.py
index 777fca9..909c1d3 100644
--- a/google/appengine/ext/blobstore/blobstore.py
+++ b/google/appengine/ext/blobstore/blobstore.py
@@ -36,6 +36,7 @@
 
 import base64
 import email
+import email.message
 
 from google.appengine.api import datastore
 from google.appengine.api import datastore_errors
@@ -402,6 +403,26 @@
   return BlobInfo.get(blob_key)
 
 
+def _get_upload_content(field_storage):
+  """Returns an email.Message holding the values of the file transfer.
+
+  It decodes the content of the field storage and creates a new email.Message.
+
+  Args:
+    field_storage: cgi.FieldStorage that represents the uploaded blob.
+
+  Returns:
+    An email.message.Message holding the upload information.
+  """
+  message = email.message.Message()
+  message.add_header(
+      'content-transfer-encoding',
+      field_storage.headers.getheader('Content-Transfer-Encoding', ''))
+  message.set_payload(field_storage.file.read())
+  payload = message.get_payload(decode=True)
+  return email.message_from_string(payload)
+
+
 def _parse_upload_info(field_storage, error_class):
   """Parse the upload info from file upload field_storage.
 
@@ -432,7 +453,7 @@
   filename = get_value(field_storage.disposition_options, 'filename')
   blob_key = field_storage.type_options.get('blob-key', None)
 
-  upload_content = email.message_from_file(field_storage.file)
+  upload_content = _get_upload_content(field_storage)
 
 
   field_storage.file.seek(0)
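
The helper above exists because the Content-Transfer-Encoding header lives on
field_storage.headers, so parsing field_storage.file alone (the old code path)
could never apply it; the helper copies that header onto a fresh Message,
decodes, and only then re-parses. A minimal, self-contained sketch of the same
stdlib mechanics (the sample payload and header value are illustrative only):

    import base64
    import email
    import email.message

    # A base64-encoded MIME fragment, as a client might transmit it.
    body = 'Content-Type: text/plain\n\nhello upload'
    raw = base64.b64encode(body)

    message = email.message.Message()
    message.add_header('content-transfer-encoding', 'base64')
    message.set_payload(raw)
    decoded = message.get_payload(decode=True)  # reverses the base64 step
    inner = email.message_from_string(decoded)  # parse the decoded content
    assert inner.get_payload() == 'hello upload'
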
diff --git a/google/appengine/ext/cloudstorage/cloudstorage_stub.py b/google/appengine/ext/cloudstorage/cloudstorage_stub.py
index fc5414b..253629f 100644
--- a/google/appengine/ext/cloudstorage/cloudstorage_stub.py
+++ b/google/appengine/ext/cloudstorage/cloudstorage_stub.py
@@ -31,6 +31,9 @@
 from google.appengine.ext.cloudstorage import common
 
 
+_GCS_DEFAULT_CONTENT_TYPE = 'binary/octet-stream'
+
+
 class _AE_GCSFileInfo_(db.Model):
   """Store GCS specific info.
 
@@ -53,7 +56,7 @@
 
   creation = db.DateTimeProperty()
 
-  content_type = db.StringProperty()
+  content_type = db.StringProperty(default=_GCS_DEFAULT_CONTENT_TYPE)
   etag = db.ByteStringProperty()
 
   def get_options(self):
@@ -234,11 +237,11 @@
     common.validate_file_path(dst)
 
 
-    src_blobkey = self._filename_to_blobkey(src)
-    source = _AE_GCSFileInfo_.get_by_key_name(src_blobkey)
     ns = namespace_manager.get_namespace()
     try:
       namespace_manager.set_namespace('')
+      src_blobkey = self._filename_to_blobkey(src)
+      source = _AE_GCSFileInfo_.get_by_key_name(src_blobkey)
       token = self._filename_to_blobkey(dst)
       new_file = _AE_GCSFileInfo_(key_name=token,
                                   filename=dst,
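
Moving the source lookup inside the try block means both the source and the
destination _AE_GCSFileInfo_ records are resolved in the empty (default)
namespace, where the stub stores them; previously a caller running under a
non-default namespace could fail to find the source file. A generic sketch of
the save/set/restore idiom used here:

    from google.appengine.api import namespace_manager

    def run_in_default_namespace(fn):
      """Calls fn() with the datastore namespace forced to '', then restores."""
      saved = namespace_manager.get_namespace()
      try:
        namespace_manager.set_namespace('')
        return fn()
      finally:
        namespace_manager.set_namespace(saved)
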
diff --git a/google/appengine/ext/cloudstorage/common.py b/google/appengine/ext/cloudstorage/common.py
index 210ff5b..0aa40cf 100644
--- a/google/appengine/ext/cloudstorage/common.py
+++ b/google/appengine/ext/cloudstorage/common.py
@@ -76,8 +76,13 @@
 _GCS_BUCKET_REGEX = re.compile(_GCS_BUCKET_REGEX_BASE + r'$')
 _GCS_BUCKET_PATH_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'$')
 _GCS_FULLPATH_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'/.*')
-_GCS_OPTIONS = ('x-goog-acl',
-                'x-goog-meta-')
+
+_GCS_METADATA = ['x-goog-meta-',
+                 'content-disposition',
+                 'cache-control',
+                 'content-encoding']
+
+_GCS_OPTIONS = _GCS_METADATA + ['x-goog-acl']
 
 CS_XML_NS = 'http://doc.s3.amazonaws.com/2006-03-01'
 
@@ -128,8 +133,9 @@
       etag: hex digest of the md5 hash of the file's content. str.
       st_ctime: posix file creation time. float compatible.
       content_type: content type. str.
-      metadata: a str->str dict of user specified metadata from the
-        x-goog-meta header, e.g. {'x-goog-meta-foo': 'foo'}.
+      metadata: a str->str dict of user specified options when creating
+        the file. Possible keys are x-goog-meta-, content-disposition,
+        content-encoding, and cache-control.
     """
     self.filename = filename
     self.st_size = long(st_size)
@@ -159,9 +165,9 @@
 
 
 def get_metadata(headers):
-  """Get user defined metadata from HTTP response headers."""
+  """Get user defined options from HTTP response headers."""
   return dict((k, v) for k, v in headers.iteritems()
-              if k.startswith('x-goog-meta-'))
+              if any(k.lower().startswith(valid) for valid in _GCS_METADATA))
 
 
 def validate_bucket_name(name):
@@ -244,7 +250,7 @@
   for k, v in options.iteritems():
     if not isinstance(k, str):
       raise TypeError('option %r should be a str.' % k)
-    if not any(k.startswith(valid) for valid in _GCS_OPTIONS):
+    if not any(k.lower().startswith(valid) for valid in _GCS_OPTIONS):
       raise ValueError('option %s is not supported.' % k)
     if not isinstance(v, basestring):
       raise TypeError('value %r for option %s should be of type basestring.' %
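
Two behavioral changes land here: option keys are matched case-insensitively
via k.lower(), and content-disposition, cache-control, and content-encoding
join the x-goog-meta- prefix as storable metadata that get_metadata() will
return. A hedged usage sketch, assuming the public cloudstorage client's
open() signature; the bucket, object name, and values are illustrative:

    import cloudstorage as gcs

    # Mixed-case keys now pass validate_options thanks to the lower() check.
    gcs_file = gcs.open('/my-bucket/report.txt', 'w',
                        content_type='text/plain',
                        options={'x-goog-meta-owner': 'alice',
                                 'Content-Disposition': 'attachment',
                                 'Cache-Control': 'public, max-age=3600'})
    gcs_file.write('hello')
    gcs_file.close()
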
diff --git a/google/appengine/ext/cloudstorage/stub_dispatcher.py b/google/appengine/ext/cloudstorage/stub_dispatcher.py
index dda7df5..41e3e2b 100644
--- a/google/appengine/ext/cloudstorage/stub_dispatcher.py
+++ b/google/appengine/ext/cloudstorage/stub_dispatcher.py
@@ -145,7 +145,7 @@
     param_dict[k] = urllib.unquote(param_dict[k][0])
 
   headers = dict((k.lower(), v) for k, v in headers.iteritems())
-  return method, headers, filename, param_dict
+  return method, headers, urllib.unquote(filename), param_dict
 
 
 def _handle_post(gcs_stub, filename, headers):
@@ -174,6 +174,18 @@
   if not content_range.value:
     raise ValueError('Missing header content-range.')
 
+
+
+  if not token:
+
+    if not content_range.last:
+      raise ValueError('Content-Range must have a final length.')
+    elif not content_range.no_data and content_range.range[0] != 0:
+      raise ValueError('Content-Range must specify the complete object.')
+    else:
+
+      token = gcs_stub.post_start_creation(filename, headers)
+
   gcs_stub.put_continue_creation(token,
                                  payload,
                                  content_range.range,
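
The new tokenless branch lets a single PUT create an object without a prior
resumable-upload POST, but only when its Content-Range pins down the complete
object: it must carry a final length and, if it carries data, start at byte 0.
A standalone illustration of that rule (the regex and helper below are
illustrative, not SDK code):

    import re

    _RANGE_RE = re.compile(r'bytes (?:(\d+)-(\d+)|\*)/(\d+|\*)')

    def describes_complete_object(content_range):
      """Returns True when a tokenless PUT would be accepted."""
      match = _RANGE_RE.match(content_range)
      if not match:
        return False
      start, _, total = match.groups()
      if total == '*':                 # no final length: a token is required
        return False
      if start is not None and int(start) != 0:
        return False                   # partial ranges still require a token
      return True

    assert describes_complete_object('bytes 0-10/11')
    assert not describes_complete_object('bytes 0-10/*')
    assert not describes_complete_object('bytes 5-10/11')
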
diff --git a/google/appengine/ext/datastore_admin/backup_handler.py b/google/appengine/ext/datastore_admin/backup_handler.py
index 5e28fca..c4bd376 100644
--- a/google/appengine/ext/datastore_admin/backup_handler.py
+++ b/google/appengine/ext/datastore_admin/backup_handler.py
@@ -65,6 +65,7 @@
 from google.appengine.ext import deferred
 from google.appengine.ext import webapp
 from google.appengine.ext.datastore_admin import backup_pb2
+from google.appengine.ext.datastore_admin import config
 from google.appengine.ext.datastore_admin import utils
 from google.appengine.ext.mapreduce import context
 from google.appengine.ext.mapreduce import datastore_range_iterators as db_iters
@@ -74,6 +75,14 @@
 from google.appengine.ext.mapreduce import output_writers
 from google.appengine.runtime import apiproxy_errors
 
+try:
+
+  from google.appengine.ext.datastore_admin import services_client
+
+except ImportError:
+
+  pass
+
 
 XSRF_ACTION = 'backup'
 BUCKET_PATTERN = (r'^([a-zA-Z0-9]+([\-_]+[a-zA-Z0-9]+)*)'
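
The guarded import above keeps this handler importable in SDK builds that ship
without services_client; only the run-as-a-service code paths below reference
it. A generic sketch of the same optional-dependency pattern (the fallback and
helper are illustrative, not the SDK's exact handling):

    try:
      from google.appengine.ext.datastore_admin import services_client
    except ImportError:
      services_client = None  # optional module: run-as-a-service unavailable

    def _require_services_client():
      """Fails loudly if a run-as-a-service operation is requested anyway."""
      if services_client is None:
        raise NotImplementedError('services_client is not in this SDK build')
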
@@ -121,13 +130,14 @@
         'datastore_v3', capabilities=['write']).is_enabled()
     blob_warning = bool(blobstore.BlobInfo.all().count(1))
     template_params = {
+        'run_as_a_service': handler.request.get('run_as_a_service'),
         'form_target': DoBackupHandler.SUFFIX,
         'kind_list': kinds,
         'remainder': remainder,
         'sizes_known': sizes_known,
         'size_total': size_total,
         'queues': None,
-        'cancel_url': handler.request.get('cancel_url'),
+        'datastore_admin_home': utils.GenerateHomeUrl(handler.request),
         'namespaces': get_namespaces(handler.request.get('namespace', None)),
         'xsrf_token': utils.CreateXsrfToken(XSRF_ACTION),
         'notreadonly_warning': notreadonly_warning,
@@ -169,10 +179,11 @@
           gs_warning |= backup.filesystem == files.GS_FILESYSTEM
     template_params = {
         'form_target': DoBackupDeleteHandler.SUFFIX,
-        'cancel_url': handler.request.get('cancel_url'),
+        'datastore_admin_home': utils.GenerateHomeUrl(handler.request),
         'backups': backups,
         'xsrf_token': utils.CreateXsrfToken(XSRF_ACTION),
-        'gs_warning': gs_warning
+        'gs_warning': gs_warning,
+        'run_as_a_service': handler.request.get('run_as_a_service'),
     }
     utils.RenderToResponse(handler, 'confirm_delete_backup.html',
                            template_params)
@@ -198,9 +209,10 @@
           backups.append(backup)
     template_params = {
         'form_target': DoBackupAbortHandler.SUFFIX,
-        'cancel_url': handler.request.get('cancel_url'),
+        'datastore_admin_home': utils.GenerateHomeUrl(handler.request),
         'backups': backups,
-        'xsrf_token': utils.CreateXsrfToken(XSRF_ACTION)
+        'xsrf_token': utils.CreateXsrfToken(XSRF_ACTION),
+        'run_as_a_service': handler.request.get('run_as_a_service'),
     }
     utils.RenderToResponse(handler, 'confirm_abort_backup.html',
                            template_params)
@@ -231,13 +243,14 @@
     template_params = {
         'form_target': DoBackupRestoreHandler.SUFFIX,
         'queues': None,
-        'cancel_url': handler.request.get('cancel_url'),
+        'datastore_admin_home': utils.GenerateHomeUrl(handler.request),
         'backup': backup,
         'delete_backup_after_restore': handler.request.get(
             'delete_backup_after_restore', default_delete_backup_after_restore),
         'xsrf_token': utils.CreateXsrfToken(XSRF_ACTION),
         'notreadonly_warning': notreadonly_warning,
-        'original_app_warning': original_app_warning
+        'original_app_warning': original_app_warning,
+        'run_as_a_service': handler.request.get('run_as_a_service'),
     }
     utils.RenderToResponse(handler, 'confirm_restore_from_backup.html',
                            template_params)
@@ -266,7 +279,7 @@
         bucket_name, prefix = parse_gs_handle(gs_handle)
         validate_gs_bucket_name(bucket_name)
         if not is_accessible_bucket_name(bucket_name):
-          raise BackupValidationException(
+          raise BackupValidationError(
               'Bucket "%s" is not accessible' % bucket_name)
         if prefix.endswith('.backup_info'):
           prefix = prefix[0:prefix.rfind('/')]
@@ -281,15 +294,17 @@
                 and backup_info_file.count('.') == 1):
             other_backup_info_files.append(backup_info_path)
       except Exception, ex:
-        error = 'Failed to read bucket: %s' % ex
+        error = 'Failed to read bucket: %s' % ex.message
+        logging.exception(ex.message)
     template_params = {
         'error': error,
         'form_target': DoBackupImportHandler.SUFFIX,
-        'cancel_url': handler.request.get('cancel_url'),
+        'datastore_admin_home': utils.GenerateHomeUrl(handler.request),
         'selected_backup_info_file': selected_backup_info_file,
         'other_backup_info_files': other_backup_info_files,
         'backup_info_specified': backup_info_specified,
-        'xsrf_token': utils.CreateXsrfToken(XSRF_ACTION)
+        'xsrf_token': utils.CreateXsrfToken(XSRF_ACTION),
+        'run_as_a_service': handler.request.get('run_as_a_service'),
     }
     utils.RenderToResponse(handler, 'confirm_backup_import.html',
                            template_params)
@@ -310,7 +325,8 @@
     backup_ids = handler.request.get_all('backup_id')
     template_params = {
         'backups': db.get(backup_ids),
-        'back_target': handler.request.get('cancel_url'),
+        'datastore_admin_home': utils.GenerateHomeUrl(handler.request),
+        'run_as_a_service': handler.request.get('run_as_a_service'),
     }
     utils.RenderToResponse(handler, 'backup_information.html', template_params)
 
@@ -318,7 +334,7 @@
 class BaseDoHandler(webapp.RequestHandler):
   """Base class for all Do*Handlers."""
 
-  MAPREDUCE_DETAIL = utils.config.MAPREDUCE_PATH + '/detail?mapreduce_id='
+  MAPREDUCE_DETAIL = config.MAPREDUCE_PATH + '/detail?mapreduce_id='
 
   def get(self):
     """Handler for get requests to datastore_admin backup operations.
@@ -326,17 +342,18 @@
     Status of executed jobs is displayed.
     """
     jobs = self.request.get_all('job')
+    remote_job = self.request.get('remote_job')
     tasks = self.request.get_all('task')
     error = self.request.get('error', '')
     xsrf_error = self.request.get('xsrf_error', '')
-
     template_params = {
         'job_list': jobs,
+        'remote_job': remote_job,
         'task_list': tasks,
         'mapreduce_detail': self.MAPREDUCE_DETAIL,
         'error': error,
         'xsrf_error': xsrf_error,
-        'datastore_admin_home': utils.config.BASE_PATH,
+        'datastore_admin_home': utils.GenerateHomeUrl(self.request),
     }
     utils.RenderToResponse(self, self._get_html_page, template_params)
 
@@ -360,6 +377,20 @@
       namespace = None
     return {'namespace': namespace}
 
+  def SendRedirect(self, path=None, params=()):
+    """Send a redirect response."""
+
+    run_as_a_service = self.request.get('run_as_a_service')
+    if run_as_a_service:
+      params = list(params)
+      params.append(('run_as_a_service', True))
+    dest = config.BASE_PATH
+    if path:
+      dest = '%s/%s' % (dest, path)
+    if params:
+      dest = '%s?%s' % (dest, urllib.urlencode(params))
+    self.redirect(dest)
+
   def post(self):
     """Handler for post requests to datastore_admin/backup.do.
 
@@ -378,13 +409,10 @@
         error = self._HandleException(e)
         parameters = [('error', error)]
 
-    query = urllib.urlencode(parameters)
-    self.redirect('%s/%s?%s' % (utils.config.BASE_PATH,
-                                self._get_post_html_page,
-                                query))
+    self.SendRedirect(self._get_post_html_page, parameters)
 
   def _HandleException(self, e):
-    """Make exception handling overrideable by tests.
+    """Make exception handling overridable by tests.
 
     Args:
       e: The exception to handle.
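
SendRedirect centralizes the redirect construction that the removed
urllib.urlencode calls used to repeat, and it appends run_as_a_service so the
flag survives each round trip back to the Datastore Admin pages. A sketch of
the URL it produces (the BASE_PATH value and page name are assumed for
illustration):

    import urllib

    BASE_PATH = '/_ah/datastore_admin'  # assumed; the real value is config.BASE_PATH
    params = [('error', 'Bucket name missing.'), ('run_as_a_service', True)]
    dest = '%s/%s?%s' % (BASE_PATH, 'backup.html', urllib.urlencode(params))
    # dest == '/_ah/datastore_admin/backup.html?error=Bucket+name+missing.&run_as_a_service=True'
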
@@ -392,19 +420,21 @@
     Returns:
       The exception error string.
     """
-    return '%s: %s' % (type(e), e)
+    logging.exception(e.message)
+    return '%s: %s' % (type(e), e.message)
 
 
-class BackupValidationException(Exception):
-  pass
+class BackupValidationError(utils.Error):
+  """Raised upon backup request validation."""
 
 
-def _perform_backup(kinds, selected_namespace,
+def _perform_backup(run_as_a_service, kinds, selected_namespace,
                     filesystem, gs_bucket_name, backup,
                     queue, mapper_params, max_jobs):
   """Triggers backup mapper jobs.
 
   Args:
+    run_as_a_service: True if the backup should be done via admin-jobs
     kinds: a sequence of kind names
     selected_namespace: The selected namespace or None for all
     filesystem: files.BLOBSTORE_FILESYSTEM or files.GS_FILESYSTEM
@@ -420,7 +450,7 @@
     The job or task ids.
 
   Raises:
-    BackupValidationException: On validation error.
+    BackupValidationError: On validation error.
     Exception: On other error.
   """
   BACKUP_COMPLETE_HANDLER = __name__ +  '.BackupCompleteHandler'
@@ -428,6 +458,16 @@
   INPUT_READER = __name__ + '.DatastoreEntityProtoInputReader'
   OUTPUT_WRITER = output_writers.__name__ + '.FileRecordsOutputWriter'
 
+  if run_as_a_service:
+    if not gs_bucket_name:
+      raise BackupValidationError('Bucket name missing.')
+    gs_bucket_name = validate_and_canonicalize_gs_bucket(gs_bucket_name)
+    datastore_admin_service = services_client.DatastoreAdminClient()
+    description = 'Remote backup job: %s' % backup
+    remote_job_id = datastore_admin_service.create_backup(
+        description, backup, gs_bucket_name, selected_namespace, kinds)
+    return [('remote_job', remote_job_id)]
+
   queue = queue or os.environ.get('HTTP_X_APPENGINE_QUEUENAME', 'default')
   if queue[0] == '_':
 
@@ -437,15 +477,12 @@
   if filesystem == files.GS_FILESYSTEM:
 
     if not gs_bucket_name:
-      raise BackupValidationException('Bucket name missing.')
-    bucket_name, path = parse_gs_handle(gs_bucket_name)
-    gs_bucket_name = ('%s/%s' % (bucket_name, path)).rstrip('/')
-    validate_gs_bucket_name(bucket_name)
-    verify_bucket_writable(bucket_name)
+      raise BackupValidationError('Bucket name missing.')
+    gs_bucket_name = validate_and_canonicalize_gs_bucket(gs_bucket_name)
   elif filesystem == files.BLOBSTORE_FILESYSTEM:
     pass
   else:
-    raise BackupValidationException('Unknown filesystem "%s".' % filesystem)
+    raise BackupValidationError('Unknown filesystem "%s".' % filesystem)
 
   job_name = 'datastore_backup_%s_%%(kind)s' % re.sub(r'[^\w]', '_', backup)
   try:
@@ -479,7 +516,7 @@
                                      BACKUP_HANDLER, INPUT_READER,
                                      OUTPUT_WRITER, mapper_params,
                                      mapreduce_params, queue, _queue=queue,
-                                     _url=utils.ConfigDefaults.DEFERRED_PATH,
+                                     _url=config.DEFERRED_PATH,
                                      _retry_options=retry_options)
       return [('task', deferred_task.name)]
   except Exception:
@@ -529,7 +566,8 @@
       if namespace == '*':
         namespace = None
       mapper_params = {'namespace': namespace}
-      _perform_backup(kinds,
+      _perform_backup(self.request.get('run_as_a_service', False),
+                      kinds,
                       namespace,
                       self.request.get('filesystem'),
                       self.request.get('gs_bucket_name'),
@@ -563,11 +601,13 @@
     try:
       backup = self.request.get('backup_name').strip()
       if not backup:
-        raise BackupValidationException('Unspecified backup name.')
+        raise BackupValidationError('Unspecified backup name.')
       if BackupInformation.name_exists(backup):
-        raise BackupValidationException('Backup "%s" already exists.' % backup)
+        raise BackupValidationError('Backup "%s" already exists.' % backup)
       mapper_params = self._GetBasicMapperParams()
-      backup_result = _perform_backup(self.request.get_all('kind'),
+      backup_result = _perform_backup(self.request.get('run_as_a_service',
+                                                       False),
+                                      self.request.get_all('kind'),
                                       mapper_params.get('namespace'),
                                       self.request.get('filesystem'),
                                       self.request.get('gs_bucket_name'),
@@ -576,7 +616,8 @@
                                       mapper_params,
                                       10)
       return backup_result
-    except BackupValidationException, e:
+    except Exception, e:
+      logging.exception(e.message)
       return [('error', e.message)]
 
 
@@ -651,6 +692,8 @@
 def delete_backup_files(filesystem, backup_files):
   if backup_files:
 
+
+
     if filesystem == files.BLOBSTORE_FILESYSTEM:
 
 
@@ -694,7 +737,7 @@
     """
     backup_ids = self.request.get_all('backup_id')
     token = self.request.get('xsrf_token')
-    error = None
+    params = ()
     if backup_ids and utils.ValidateXsrfToken(token, XSRF_ACTION):
       try:
         for backup_info in db.get(backup_ids):
@@ -702,13 +745,9 @@
             delete_backup_info(backup_info)
       except Exception, e:
         logging.exception('Failed to delete datastore backup.')
-        error = str(e)
+        params = [('error', e.message)]
 
-    if error:
-      query = urllib.urlencode([('error', error)])
-      self.redirect('%s?%s' % (utils.config.BASE_PATH, query))
-    else:
-      self.redirect(utils.config.BASE_PATH)
+    self.SendRedirect(params=params)
 
 
 class DoBackupAbortHandler(BaseDoHandler):
@@ -726,21 +765,24 @@
     """
     backup_ids = self.request.get_all('backup_id')
     token = self.request.get('xsrf_token')
-    error = None
+    params = ()
     if backup_ids and utils.ValidateXsrfToken(token, XSRF_ACTION):
       try:
         for backup_info in db.get(backup_ids):
           if backup_info:
-            utils.AbortAdminOperation(backup_info.parent_key())
+            operation = backup_info.parent()
+            if operation.parent_key():
+              job_id = str(operation.parent_key())
+              datastore_admin_service = services_client.DatastoreAdminClient()
+              datastore_admin_service.abort_backup(job_id)
+            else:
+              utils.AbortAdminOperation(operation.key())
             delete_backup_info(backup_info)
       except Exception, e:
         logging.exception('Failed to abort pending datastore backup.')
-        error = str(e)
+        params = [('error', e.message)]
 
-    if error:
-      self.redirect(utils.config.BASE_PATH + '?error=%s' % error)
-    else:
-      self.redirect(utils.config.BASE_PATH)
+    self.SendRedirect(params=params)
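
Both the delete and abort handlers above now funnel their redirects through a
SendRedirect helper instead of assembling query strings inline. Its definition
lies outside this section; judging from the code it replaces, it plausibly
behaves like this reconstruction (hypothetical sketch, not the SDK's actual
body):

    import urllib

    def SendRedirect(self, path=None, params=()):
      # Hypothetical reconstruction of a handler method: redirect to the
      # datastore admin home (or a sub-path), appending any (key, value)
      # pairs as a query string -- the same behaviour the removed
      # urllib.urlencode/redirect code implemented inline.
      dest = config.BASE_PATH
      if path:
        dest = '%s/%s' % (dest, path)
      if params:
        dest = '%s?%s' % (dest, urllib.urlencode(params))
      self.redirect(dest)
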
 
 
 class DoBackupRestoreHandler(BaseDoHandler):
@@ -770,10 +812,6 @@
       if not is_readable_gs_handle(backup.gs_handle):
         return [('error', 'Backup not readable')]
 
-    queue = self.request.get('queue')
-    job_name = 'datastore_backup_restore_%s' % re.sub(r'[^\w]', '_',
-                                                      backup.name)
-    job_operation = None
     kinds = set(self.request.get_all('kind'))
     if not (backup.blob_files or kinds):
       return [('error', 'No kinds were selected')]
@@ -783,6 +821,20 @@
       return [('error', 'Backup does not have kind[s] %s' %
                ', '.join(difference))]
     kinds = list(kinds) if len(backup_kinds) != len(kinds) else []
+    if self.request.get('run_as_a_service', False):
+      if not backup.gs_handle:
+        return [('error',
+                 'Restore as a service is only available for GS backups')]
+      datastore_admin_service = services_client.DatastoreAdminClient()
+      description = 'Remote restore job: %s' % backup.name
+      remote_job_id = datastore_admin_service.restore_from_backup(
+          description, backup_id, kinds)
+      return [('remote_job', remote_job_id)]
+
+    queue = self.request.get('queue')
+    job_name = 'datastore_backup_restore_%s' % re.sub(r'[^\w]', '_',
+                                                      backup.name)
+    job_operation = None
     try:
       operation_name = 'Restoring %s from backup: %s' % (
           ', '.join(kinds) if kinds else 'all', backup.name)
@@ -793,7 +845,7 @@
       mapper_params['original_app'] = backup.original_app
       mapreduce_params = {
           'backup_name': backup.name,
-          'force_ops_writes': True
+          'force_ops_writes': True,
       }
       shard_count = min(max(utils.MAPREDUCE_MIN_SHARDS,
                             len(mapper_params['files'])),
@@ -810,12 +862,6 @@
         job_operation.status = utils.DatastoreAdminOperation.STATUS_FAILED
         job_operation.put(force_writes=True)
       raise
-    finally:
-
-
-
-      if self.request.get('delete_backup_after_restore', '').lower() == 'true':
-        delete_backup_info(backup, delete_files=False)
 
 
 class DoBackupImportHandler(BaseDoHandler):
@@ -858,21 +904,21 @@
         backup_id = str(backup_info.key())
       except Exception, e:
         logging.exception('Failed to Import datastore backup information.')
-        error = str(e)
+        error = e.message
 
     if error:
-      query = urllib.urlencode([('error', error)])
-      self.redirect('%s?%s' % (utils.config.BASE_PATH, query))
+      self.SendRedirect(params=[('error', error)])
     elif self.request.get('Restore'):
       ConfirmRestoreFromBackupHandler.Render(
           self, default_backup_id=backup_id,
           default_delete_backup_after_restore=True)
     else:
-      self.redirect(utils.config.BASE_PATH)
+      self.SendRedirect()
+
 
 
 class BackupInformation(db.Model):
-  """An entity to keep information on successful backup operations."""
+  """An entity to keep information on a datastore backup."""
 
   name = db.StringProperty()
   kinds = db.StringListProperty()
@@ -910,6 +956,7 @@
       return KindBackupFiles.all().ancestor(self).run()
 
 
+
 class KindBackupFiles(db.Model):
   """An entity to keep files information per kind for a backup.
 
@@ -927,64 +974,71 @@
     return utils.BACKUP_INFORMATION_FILES_KIND
 
 
-@db.transactional
 def BackupCompleteHandler(operation, job_id, mapreduce_state):
   """Updates BackupInformation record for a completed mapper job."""
   mapreduce_spec = mapreduce_state.mapreduce_spec
-  kind = mapreduce_spec.mapper.params['entity_kind']
-  backup_info = BackupInformation.get(mapreduce_spec.params['backup_info_pk'])
+  _perform_backup_complete(operation,
+                           job_id,
+                           mapreduce_spec.mapper.params['entity_kind'],
+                           mapreduce_spec.params['backup_info_pk'],
+                           mapreduce_spec.mapper.params.get('gs_bucket_name'),
+                           mapreduce_state.writer_state['filenames'],
+                           mapreduce_spec.params.get('done_callback_queue'))
+
+
+@db.transactional
+def _perform_backup_complete(
+    operation, job_id, kind, backup_info_pk, gs_bucket_name, filenames, queue):
+  backup_info = BackupInformation.get(backup_info_pk)
   if backup_info:
     if job_id in backup_info.active_jobs:
       backup_info.active_jobs.remove(job_id)
       backup_info.completed_jobs = list(
           set(backup_info.completed_jobs + [job_id]))
-    filenames = mapreduce_state.writer_state['filenames']
 
 
     if backup_info.filesystem == files.BLOBSTORE_FILESYSTEM:
       filenames = drop_empty_files(filenames)
-    if backup_info.blob_files:
-
-
-
-
-      backup_info.blob_files = list(set(backup_info.blob_files + filenames))
-      backup_info.put(force_writes=True)
+    kind_backup_files = backup_info.get_kind_backup_files([kind])[0]
+    if kind_backup_files:
+      kind_backup_files.files = list(set(kind_backup_files.files + filenames))
     else:
-      kind_backup_files = backup_info.get_kind_backup_files([kind])[0]
-      if kind_backup_files:
-        kind_backup_files.files = list(set(kind_backup_files.files + filenames))
-      else:
-        kind_backup_files = backup_info.create_kind_backup_files(kind,
-                                                                 filenames)
-      db.put((backup_info, kind_backup_files), force_writes=True)
+      kind_backup_files = backup_info.create_kind_backup_files(kind, filenames)
+    db.put((backup_info, kind_backup_files), force_writes=True)
     if operation.status == utils.DatastoreAdminOperation.STATUS_COMPLETED:
       deferred.defer(finalize_backup_info, backup_info.key(),
-                     mapreduce_spec.mapper.params,
-                     _url=utils.ConfigDefaults.DEFERRED_PATH,
-                     _queue=mapreduce_spec.params.get('done_callback_queue'),
+                     gs_bucket_name,
+                     _url=config.DEFERRED_PATH,
+                     _queue=queue,
                      _transactional=True)
   else:
-    logging.warn('BackupInfo was not found for %s',
-                 mapreduce_spec.params['backup_info_pk'])
+    logging.warn('BackupInfo was not found for %s', backup_info_pk)
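
The refactoring above moves the @db.transactional decorator off the mapreduce
callback and onto a private helper, so every mapreduce_spec read happens
before the transaction opens and only datastore work runs inside it. A minimal
sketch of that pattern, with hypothetical entity and field names:

    from google.appengine.ext import db

    @db.transactional
    def _mark_done(counter_key, delta):
      # Only datastore operations inside the transaction; inputs arrive
      # as plain values extracted by the non-transactional caller.
      counter = db.get(counter_key)
      counter.done += delta
      counter.put()

    def on_job_complete(mapreduce_state, counter_key):
      # Read the (non-entity) job state outside the transaction.
      delta = len(mapreduce_state.writer_state['filenames'])
      _mark_done(counter_key, delta)
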
 
 
-def finalize_backup_info(backup_info_pk, mapper_params):
+def finalize_backup_info(backup_info_pk, gs_bucket):
   """Finalize the state of BackupInformation and creates info file for GS."""
 
+  def get_backup_info():
+    return BackupInformation.get(backup_info_pk)
 
-  def tx():
-    backup_info = BackupInformation.get(backup_info_pk)
-    if backup_info:
-      backup_info.complete_time = datetime.datetime.now()
-      if backup_info.filesystem == files.GS_FILESYSTEM:
-        gs_bucket = mapper_params['gs_bucket_name']
-        BackupInfoWriter(gs_bucket).write(backup_info)
+  backup_info = db.run_in_transaction(get_backup_info)
+  if backup_info:
+    complete_time = datetime.datetime.now()
+    backup_info.complete_time = complete_time
+    if backup_info.filesystem == files.GS_FILESYSTEM:
+
+
+
+      BackupInfoWriter(gs_bucket).write(backup_info)
+
+    def set_backup_info_complete_time():
+      backup_info = get_backup_info()
+      backup_info.complete_time = complete_time
       backup_info.put(force_writes=True)
-      logging.info('Backup %s completed', backup_info.name)
-    else:
-      logging.warn('Backup %s could not be found', backup_info_pk)
-  db.run_in_transaction(tx)
+    db.run_in_transaction(set_backup_info_complete_time)
+    logging.info('Backup %s completed', backup_info.name)
+  else:
+    logging.warn('Backup %s could not be found', backup_info_pk)
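
The rewritten finalize_backup_info above is a two-phase commit of sorts: read
the entity in one transaction, perform the non-transactional Cloud Storage
write in between, then persist the completion time in a second transaction, so
a failed GS write never leaves a half-finalized record. Condensed to a hedged
standalone sketch (entity and callback names hypothetical):

    import datetime

    from google.appengine.ext import db

    def finalize(entity_key, write_info_file):
      entity = db.run_in_transaction(lambda: db.get(entity_key))
      if entity is None:
        return
      complete_time = datetime.datetime.now()
      entity.complete_time = complete_time
      write_info_file(entity)            # non-transactional GS write

      def commit():
        fresh = db.get(entity_key)       # re-read inside the transaction
        fresh.complete_time = complete_time
        fresh.put()
      db.run_in_transaction(commit)
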
 
 
 def parse_backup_info_file(content):
@@ -1074,8 +1128,16 @@
     Returns:
       A list with all created filenames.
     """
+    def get_backup_files_tx():
+      kind_backup_files_list = []
+
+      for kind_backup_files in backup_info.get_kind_backup_files():
+        kind_backup_files_list.append(kind_backup_files)
+      return kind_backup_files_list
+
+    kind_backup_files_list = db.run_in_transaction(get_backup_files_tx)
     filenames = []
-    for kind_backup_files in backup_info.get_kind_backup_files():
+    for kind_backup_files in kind_backup_files_list:
       backup = self._create_kind_backup(backup_info, kind_backup_files)
       filename = self._generate_filename(
           backup_info, '.%s.backup_info' % kind_backup_files.backup_kind)
@@ -1585,33 +1647,33 @@
     bucket_name: The bucket name to validate.
 
   Raises:
-    BackupValidationException: If the bucket name is invalid.
+    BackupValidationError: If the bucket name is invalid.
   """
   if len(bucket_name) > MAX_BUCKET_LEN:
-    raise BackupValidationException(
+    raise BackupValidationError(
         'Bucket name length should not be longer than %d' % MAX_BUCKET_LEN)
   if len(bucket_name) < MIN_BUCKET_LEN:
-    raise BackupValidationException(
+    raise BackupValidationError(
         'Bucket name length should be at least %d' % MIN_BUCKET_LEN)
   if bucket_name.lower().startswith('goog'):
-    raise BackupValidationException(
+    raise BackupValidationError(
         'Bucket name should not start with a "goog" prefix')
   bucket_elements = bucket_name.split('.')
   for bucket_element in bucket_elements:
     if len(bucket_element) > MAX_BUCKET_SEGMENT_LEN:
-      raise BackupValidationException(
+      raise BackupValidationError(
           'Segment length of bucket name should not be longer than %d' %
           MAX_BUCKET_SEGMENT_LEN)
   if not re.match(BUCKET_PATTERN, bucket_name):
-    raise BackupValidationException('Invalid bucket name "%s"' % bucket_name)
+    raise BackupValidationError('Invalid bucket name "%s"' % bucket_name)
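
To make the renamed validation concrete, a couple of hedged examples (bucket
names hypothetical; success additionally assumes the name satisfies the
BUCKET_PATTERN and length constants defined elsewhere in this file):

    validate_gs_bucket_name('my-app-backups')   # returns None on success
    try:
      validate_gs_bucket_name('goog-backups')   # reserved "goog" prefix
    except BackupValidationError, e:
      print e   # 'Bucket name should not start with a "goog" prefix'
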
 
 
 def is_accessible_bucket_name(bucket_name):
   """Returns True if the application has access to the specified bucket."""
-  scope = 'https://www.googleapis.com/auth/devstorage.read_write'
-  url = 'https://%s.commondatastorage.googleapis.com/' % bucket_name
+  scope = config.GoogleApiScope('devstorage.read_write')
+  bucket_url = config.GsBucketURL(bucket_name)
   auth_token, _ = app_identity.get_access_token(scope)
-  result = urlfetch.fetch(url, method=urlfetch.HEAD, headers={
+  result = urlfetch.fetch(bucket_url, method=urlfetch.HEAD, headers={
       'Authorization': 'OAuth %s' % auth_token,
       'x-goog-api-version': '2'})
   return result and result.status_code == 200
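
is_accessible_bucket_name shows the recipe this file now uses throughout for
talking to Cloud Storage: mint an OAuth token via app_identity and attach it
to a urlfetch call. As a freestanding sketch (the gcs_head helper is
hypothetical; the headers mirror the code above):

    from google.appengine.api import app_identity
    from google.appengine.api import urlfetch

    def gcs_head(url):
      # Scope and URL construction go through the new config helpers.
      token, _ = app_identity.get_access_token(
          config.GoogleApiScope('devstorage.read_only'))
      return urlfetch.fetch(url, method=urlfetch.HEAD, headers={
          'Authorization': 'OAuth %s' % token,
          'x-goog-api-version': '2'})
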
@@ -1624,7 +1686,7 @@
     bucket_name: The bucket to verify.
 
   Raises:
-    BackupValidationException: If the bucket is not writable.
+    BackupValidationError: If the bucket is not writable.
   """
   path = '/gs/%s' % bucket_name
   try:
@@ -1632,9 +1694,9 @@
                                   {'prefix': TEST_WRITE_FILENAME_PREFIX,
                                    'max_keys': MAX_KEYS_LIST_SIZE})
   except (files.InvalidParameterError, files.PermissionDeniedError):
-    raise BackupValidationException('Bucket "%s" not accessible' % bucket_name)
+    raise BackupValidationError('Bucket "%s" not accessible' % bucket_name)
   except files.InvalidFileNameError:
-    raise BackupValidationException('Bucket "%s" does not exist' % bucket_name)
+    raise BackupValidationError('Bucket "%s" does not exist' % bucket_name)
   file_name = '%s/%s.tmp' % (path, TEST_WRITE_FILENAME_PREFIX)
   file_name_try = 0
   while True:
@@ -1654,7 +1716,7 @@
     finally:
       test_file.close(finalize=True)
   except files.PermissionDeniedError:
-    raise BackupValidationException('Bucket "%s" is not writable' % bucket_name)
+    raise BackupValidationError('Bucket "%s" is not writable' % bucket_name)
   try:
     files.delete(file_name)
   except (files.InvalidArgumentError, files.InvalidFileNameError, IOError):
@@ -1679,15 +1741,24 @@
     if filesystem == 'gs':
       gs_handle = gs_handle[4:]
     else:
-      raise BackupValidationException('Unsupported filesystem: %s' % filesystem)
+      raise BackupValidationError('Unsupported filesystem: %s' % filesystem)
   tokens = gs_handle.split('/', 1)
   return (tokens[0], '') if len(tokens) == 1 else tuple(tokens)
 
 
+def validate_and_canonicalize_gs_bucket(gs_bucket_name):
+  bucket_name, path = parse_gs_handle(gs_bucket_name)
+  gs_bucket_name = ('%s/%s' % (bucket_name, path)).rstrip('/')
+  validate_gs_bucket_name(bucket_name)
+  verify_bucket_writable(bucket_name)
+  return gs_bucket_name
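
A quick illustration of the canonicalization the new helper performs, based on
parse_gs_handle above (the input handle is hypothetical):

    # '/gs/my-bucket/nightly/' -> bucket 'my-bucket', path 'nightly/'
    # -> canonical handle 'my-bucket/nightly' (trailing '/' stripped);
    # the bucket itself is then validated and write-tested.
    handle = validate_and_canonicalize_gs_bucket('/gs/my-bucket/nightly/')
    assert handle == 'my-bucket/nightly'
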
+
+
 def list_bucket_files(bucket_name, prefix, max_keys=1000):
   """Returns a listing of of a bucket that matches the given prefix."""
-  scope = 'https://www.googleapis.com/auth/devstorage.read_only'
-  url = 'https://%s.commondatastorage.googleapis.com/?' % bucket_name
+  scope = config.GoogleApiScope('devstorage.read_only')
+  bucket_url = config.GsBucketURL(bucket_name)
+  url = bucket_url + '?'
   query = [('max-keys', max_keys)]
   if prefix:
     query.append(('prefix', prefix))
@@ -1699,20 +1770,22 @@
   if result and result.status_code == 200:
     doc = xml.dom.minidom.parseString(result.content)
     return [node.childNodes[0].data for node in doc.getElementsByTagName('Key')]
-  raise BackupValidationException('Request to Google Cloud Storage failed')
+  raise BackupValidationError('Request to Google Cloud Storage failed')
 
 
 def get_gs_object(bucket_name, path):
   """Returns a listing of of a bucket that matches the given prefix."""
-  scope = 'https://www.googleapis.com/auth/devstorage.read_only'
-  url = 'https://%s.commondatastorage.googleapis.com/%s' % (bucket_name, path)
+  scope = config.GoogleApiScope('devstorage.read_only')
+  bucket_url = config.GsBucketURL(bucket_name)
+  url = bucket_url + path
   auth_token, _ = app_identity.get_access_token(scope)
   result = urlfetch.fetch(url, method=urlfetch.GET, headers={
       'Authorization': 'OAuth %s' % auth_token,
       'x-goog-api-version': '2'})
   if result and result.status_code == 200:
     return result.content
-  raise BackupValidationException('Requested path was not found')
+  raise BackupValidationError('Requested path %s was not found' % url)
+
 
 
 
diff --git a/google/appengine/ext/datastore_admin/config.py b/google/appengine/ext/datastore_admin/config.py
new file mode 100644
index 0000000..a0ee391
--- /dev/null
+++ b/google/appengine/ext/datastore_admin/config.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+"""App Engine Datastore Admin configuration module.
+
+Contains global configuration settings for various deployment environments.
+
+Configuration values are added as class attributes to the respective
+environment classes.  The environments form a hierarchy of configurations
+that inherit from one another.
+
+At module loading time one configuration class is selected to be the Current
+configuration.
+This is determined by examining the DATACENTER environment variable.
+See GetConfig for details.
+
+Defining values:
+
+  New configuration values should be introduced by simply adding constants
+  to the Default class and adding specialized values when needed to override
+  those values specifically for each cluster type.  For example, let's say
+  we need to configure the Admin Console URL:
+
+    class Default(object):
+      ...
+      ADMIN_CONSOLE_URL = 'https://appengine.google.com'
+
+
+    class Local(Default):
+      ...
+      ADMIN_CONSOLE_URL = 'https://127.0.0.1:8000'
+
+
+Using values:
+
+  All values of the Current configuration are imported up to the top level of
+  this module.  Access to the configuration values should be done via the module
+  directly. Note that changing configuration values at runtime is NOT supported.
+  It is assumed that values in this configuration module are constants.
+"""
+
+import os
+
+
+class Default(object):
+  """Configuration object."""
+
+
+  BASE_PATH = '/_ah/datastore_admin'
+  MAPREDUCE_PATH = '/_ah/mapreduce'
+  DEFERRED_PATH = BASE_PATH + '/queue/deferred'
+  CLEANUP_MAPREDUCE_STATE = True
+
+  DEFAULT_APP_DOMAIN = 'placeholder.com'
+  GOOGLE_API_HOSTNAME = 'www.googleapis.com'
+  GOOGLE_API_OAUTH_SCOPE_HOSTNAME = 'https://www.googleapis.com'
+  GS_API_HOSTNAME = 'storage.googleapis.com'
+  ADMIN_API_APP_ID = 'admin-api'
+  ADMIN_API_APP_VERSION = None
+  ADMIN_API_NAME = 'appengine'
+  ADMIN_API_VERSION = 'vdev'
+  ADMIN_API_VALIDATE_SSL = True
+  ADMIN_CONSOLE_URL = 'https://appengine.google.com'
+
+  @property
+  def GOOGLE_API_HOST(self):
+    return 'https://%s' % self.GOOGLE_API_HOSTNAME
+
+  def GoogleApiScope(self, scope_type):
+    return '%s/%s' % (self.GOOGLE_API_AUTH, scope_type)
+
+  @property
+  def GOOGLE_API_AUTH(self):
+    return '%s/auth' % self.GOOGLE_API_OAUTH_SCOPE_HOSTNAME
+
+  @property
+  def DISCOVERY_URL(self):
+    if self.ADMIN_API_APP_VERSION:
+      hostname = '%s-dot-%s.%s' % (self.ADMIN_API_APP_VERSION,
+                                   self.ADMIN_API_APP_ID,
+                                   self.DEFAULT_APP_DOMAIN)
+    else:
+      hostname = '%s.%s' % (self.ADMIN_API_APP_ID, self.DEFAULT_APP_DOMAIN)
+    path = '_ah/api/discovery/v1/apis/{api}/{apiVersion}/rest'
+    return 'https://%s/%s' % (hostname, path)
+
+  def GsBucketURL(self, bucket_name):
+    return 'https://%s/%s/' % (self.GS_API_HOSTNAME, bucket_name)
+
+
+class Local(Default):
+  """Dev-appserver configuration."""
+
+
+class Prod(Default):
+  """Production cluster configuration."""
+  DEFAULT_APP_DOMAIN = 'appspot.com'
+
+
+
+
+try:
+
+  import config_runtime
+
+  RUNTIME_DATACENTER_TO_CLASS = config_runtime.RUNTIME_DATACENTER_TO_CLASS
+except ImportError:
+  RUNTIME_DATACENTER_TO_CLASS = {}
+
+
+def GetConfig():
+  """Determine configuration class based on the runtime environment.
+
+  The DATACENTER environment variable is used to determine which App Engine
+  cluster type this application is deployed on. Dev-appservers do not set the
+  DATACENTER variable at all. Production datacenter names match none of the
+  known prefixes and therefore fall through to the Prod configuration.
+
+  Returns:
+    Class of the configuration determined by examining the runtime environment.
+  """
+  datacenter = os.environ.get('DATACENTER')
+  if not datacenter:
+    return Local
+  for prefix, config in RUNTIME_DATACENTER_TO_CLASS.items():
+    if datacenter.startswith(prefix):
+      return config
+  return Prod
+
+
+def Export(cls):
+  """Export public class values to the config module."""
+  global current
+  current = cls()
+  for name in dir(current):
+    if not name.startswith('_'):
+      globals()[name] = getattr(current, name)
+
+
+current = None
+Export(GetConfig())
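
Since Export() promotes every public attribute of the selected class to module
level, callers import the module and read constants or call helpers directly.
A minimal sketch; the values shown are the Default ones defined above:

    from google.appengine.ext.datastore_admin import config

    config.BASE_PATH       # '/_ah/datastore_admin'
    config.DEFERRED_PATH   # '/_ah/datastore_admin/queue/deferred'
    config.GoogleApiScope('devstorage.read_write')
    # -> 'https://www.googleapis.com/auth/devstorage.read_write'
    config.GsBucketURL('my-bucket')
    # -> 'https://storage.googleapis.com/my-bucket/'
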
diff --git a/google/appengine/ext/datastore_admin/copy_handler.py b/google/appengine/ext/datastore_admin/copy_handler.py
index 6dcba3b..d7c38c4 100644
--- a/google/appengine/ext/datastore_admin/copy_handler.py
+++ b/google/appengine/ext/datastore_admin/copy_handler.py
@@ -39,6 +39,7 @@
 from google.appengine.datastore import datastore_rpc
 from google.appengine.ext import blobstore
 from google.appengine.ext import webapp
+from google.appengine.ext.datastore_admin import config
 from google.appengine.ext.datastore_admin import remote_api_put_stub
 from google.appengine.ext.datastore_admin import utils
 from google.appengine.ext.mapreduce import context
@@ -79,7 +80,7 @@
         'sizes_known': sizes_known,
         'size_total': size_total,
         'app_id': handler.request.get('app_id'),
-        'cancel_url': handler.request.get('cancel_url'),
+        'datastore_admin_home': utils.GenerateHomeUrl(handler.request),
         'kind_str': kind_str,
         'namespace_str': namespace_str,
         'xsrf_token': utils.CreateXsrfToken(XSRF_ACTION),
@@ -100,8 +101,8 @@
   COPY_HANDLER = ('google.appengine.ext.datastore_admin.copy_handler.'
                   'RemoteCopyEntity.map')
   INPUT_READER = ('google.appengine.ext.mapreduce.input_readers.'
-                  'ConsistentKeyReader')
-  MAPREDUCE_DETAIL = utils.config.MAPREDUCE_PATH + '/detail?mapreduce_id='
+                  'DatastoreKeyInputReader')
+  MAPREDUCE_DETAIL = config.MAPREDUCE_PATH + '/detail?mapreduce_id='
 
   def get(self):
     """Handler for get requests to datastore_admin/copy.do.
@@ -117,7 +118,7 @@
         'mapreduce_detail': self.MAPREDUCE_DETAIL,
         'error': error,
         'xsrf_error': xsrf_error,
-        'datastore_admin_home': utils.config.BASE_PATH,
+        'datastore_admin_home': config.BASE_PATH,
     }
     utils.RenderToResponse(self, 'do_copy.html', template_params)
 
@@ -177,7 +178,7 @@
         parameters.append(('error', error))
 
     query = urllib.urlencode(parameters)
-    self.redirect('%s/%s?%s' % (utils.config.BASE_PATH, self.SUFFIX, query))
+    self.redirect('%s/%s?%s' % (config.BASE_PATH, self.SUFFIX, query))
 
   def _HandleException(self, e):
     """Make exception handling overrideable by tests.
diff --git a/google/appengine/ext/datastore_admin/delete_handler.py b/google/appengine/ext/datastore_admin/delete_handler.py
index 5dda075..97ad029 100644
--- a/google/appengine/ext/datastore_admin/delete_handler.py
+++ b/google/appengine/ext/datastore_admin/delete_handler.py
@@ -28,8 +28,10 @@
 import re
 import urllib
 
+from google.appengine.api import capabilities
 from google.appengine.api import datastore
 from google.appengine.ext import webapp
+from google.appengine.ext.datastore_admin import config
 from google.appengine.ext.datastore_admin import utils
 from google.appengine.ext.mapreduce import model
 from google.appengine.ext.mapreduce import operation
@@ -81,19 +83,22 @@
     Args:
       handler: the webapp.RequestHandler invoking the method
     """
+    readonly_warning = not capabilities.CapabilitySet(
+        'datastore_v3', capabilities=['write']).is_enabled()
     namespace = handler.request.get('namespace')
     kinds = handler.request.get_all('kind')
     sizes_known, size_total, remainder = utils.ParseKindsAndSizes(kinds)
 
     (namespace_str, kind_str) = utils.GetPrintableStrs(namespace, kinds)
     template_params = {
+        'readonly_warning': readonly_warning,
         'form_target': DoDeleteHandler.SUFFIX,
         'kind_list': kinds,
         'remainder': remainder,
         'sizes_known': sizes_known,
         'size_total': size_total,
         'app_id': handler.request.get('app_id'),
-        'cancel_url': handler.request.get('cancel_url'),
+        'datastore_admin_home': utils.GenerateHomeUrl(handler.request),
         'kind_str': kind_str,
         'namespace_str': namespace_str,
         'xsrf_token': utils.CreateXsrfToken(XSRF_ACTION),
@@ -113,7 +118,7 @@
       'google.appengine.ext.datastore_admin.delete_handler.DeleteEntity')
   INPUT_READER = (
       'google.appengine.ext.mapreduce.input_readers.DatastoreKeyInputReader')
-  MAPREDUCE_DETAIL = utils.config.MAPREDUCE_PATH + '/detail?mapreduce_id='
+  MAPREDUCE_DETAIL = config.MAPREDUCE_PATH + '/detail?mapreduce_id='
 
   def get(self):
     """Handler for get requests to datastore_admin/delete.do.
@@ -123,13 +128,15 @@
     jobs = self.request.get_all('job')
     error = self.request.get('error', '')
     xsrf_error = self.request.get('xsrf_error', '')
+    noconfirm_error = self.request.get('noconfirm_error', '')
 
     template_params = {
         'job_list': jobs,
         'mapreduce_detail': self.MAPREDUCE_DETAIL,
         'error': error,
         'xsrf_error': xsrf_error,
-        'datastore_admin_home': utils.config.BASE_PATH,
+        'noconfirm_error': noconfirm_error,
+        'datastore_admin_home': config.BASE_PATH,
     }
     utils.RenderToResponse(self, 'do_delete.html', template_params)
 
@@ -142,43 +149,52 @@
     kinds = self.request.get_all('kind')
     (namespace_str, kinds_str) = utils.GetPrintableStrs(namespace, kinds)
     token = self.request.get('xsrf_token')
+    readonly_warning = self.request.get('readonly_warning')
 
     jobs = []
-    if utils.ValidateXsrfToken(token, XSRF_ACTION):
-      try:
-        op = utils.StartOperation(
-            'Deleting %s%s' % (kinds_str, namespace_str))
-        name_template = 'Delete all %(kind)s objects%(namespace)s'
-        queue = self.request.get('queue')
-        queue = queue or os.environ.get('HTTP_X_APPENGINE_QUEUENAME', 'default')
-        if queue[0] == '_':
 
-          queue = 'default'
-        jobs = utils.RunMapForKinds(
-            op.key(),
-            kinds,
-            name_template,
-            self.DELETE_HANDLER,
-            self.INPUT_READER,
-            None,
-            {},
-            queue_name=queue,
-            max_shard_count=utils.MAPREDUCE_DEFAULT_SHARDS)
-        error = ''
-
-
-      except Exception, e:
-        error = self._HandleException(e)
-
-      parameters = [('job', job) for job in jobs]
-      if error:
-        parameters.append(('error', error))
+    if (readonly_warning == 'True') and not self.request.get(
+        'confirm_readonly_delete'):
+      parameters = [('noconfirm_error', '1')]
     else:
-      parameters = [('xsrf_error', '1')]
+      if utils.ValidateXsrfToken(token, XSRF_ACTION):
+        try:
+          op = utils.StartOperation(
+              'Deleting %s%s' % (kinds_str, namespace_str))
+          name_template = 'Delete all %(kind)s objects%(namespace)s'
+          mapreduce_params = {'force_ops_writes': True}
+          queue = self.request.get('queue')
+          queue = queue or os.environ.get(
+              'HTTP_X_APPENGINE_QUEUENAME', 'default')
+          if queue[0] == '_':
+
+            queue = 'default'
+          jobs = utils.RunMapForKinds(
+              op.key(),
+              kinds,
+              name_template,
+              self.DELETE_HANDLER,
+              self.INPUT_READER,
+              None,
+              {},
+              mapreduce_params=mapreduce_params,
+              queue_name=queue,
+              max_shard_count=utils.MAPREDUCE_DEFAULT_SHARDS)
+          error = ''
+
+
+        except Exception, e:
+          error = self._HandleException(e)
+
+        parameters = [('job', job) for job in jobs]
+        if error:
+          parameters.append(('error', error))
+      else:
+        parameters = [('xsrf_error', '1')]
 
     query = urllib.urlencode(parameters)
 
-    self.redirect('%s/%s?%s' % (utils.config.BASE_PATH, self.SUFFIX, query))
+    self.redirect('%s/%s?%s' % (config.BASE_PATH, self.SUFFIX, query))
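
The readonly_warning introduced above comes from the Capabilities API; as a
freestanding sketch of that check:

    from google.appengine.api import capabilities

    # True while the datastore accepts writes; during scheduled read-only
    # maintenance this is False, which is what the delete confirmation
    # page now warns about and asks the user to explicitly confirm.
    writable = capabilities.CapabilitySet(
        'datastore_v3', capabilities=['write']).is_enabled()
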
 
   def _HandleException(self, e):
     """Make exception handling overrideable by tests.
diff --git a/google/appengine/ext/datastore_admin/main.py b/google/appengine/ext/datastore_admin/main.py
index 9283b07..7019bbe 100644
--- a/google/appengine/ext/datastore_admin/main.py
+++ b/google/appengine/ext/datastore_admin/main.py
@@ -26,8 +26,10 @@
 """
 
 
+import logging
 import operator
 import os
+import time
 
 from google.appengine.api import app_identity
 from google.appengine.api import datastore_errors
@@ -35,6 +37,7 @@
 from google.appengine.ext import deferred
 from google.appengine.ext import webapp
 from google.appengine.ext.datastore_admin import backup_handler
+from google.appengine.ext.datastore_admin import config
 from google.appengine.ext.datastore_admin import copy_handler
 from google.appengine.ext.datastore_admin import delete_handler
 from google.appengine.ext.datastore_admin import utils
@@ -70,6 +73,9 @@
                     backup_handler.ConfirmBackupImportHandler.Render})
 
 
+MAX_RPCS = 10
+
+
 def _GetDatastoreStats(kinds_list, use_stats_kinds=False):
   """Retrieves stats for kinds.
 
@@ -80,7 +86,6 @@
 
   Returns:
     timestamp: records time that statistics were last updated.
-    global_size: total size of all known kinds.
     kind_dict: dictionary of kind objects with the following members:
     - kind_name: the name of this kind.
     - count: number of known entities of this type.
@@ -147,19 +152,23 @@
     """Handler for get requests to datastore_admin/confirm_delete."""
     use_stats_kinds = False
     kinds = []
+    more_kinds = False
     try:
-      kinds = self.GetKinds()
+      kinds, more_kinds = self.GetKinds()
       if not kinds:
         use_stats_kinds = True
-    except datastore_errors.Error:
+        logging.warning('Found no kinds. Using datastore stats instead.')
+    except datastore_errors.Error, e:
+      logging.exception(e)
       use_stats_kinds = True
 
     last_stats_update, kind_stats = _GetDatastoreStats(
         kinds, use_stats_kinds=use_stats_kinds)
 
     template_params = {
+        'run_as_a_service': self.request.get('run_as_a_service'),
         'kind_stats': kind_stats,
-        'cancel_url': self.request.path + '?' + self.request.query_string,
+        'more_kinds': more_kinds,
         'last_stats_update': last_stats_update,
         'app_id': self.request.get('app_id'),
         'hosting_app_id': app_identity.get_application_id(),
@@ -173,7 +182,7 @@
         'active_operations': self.GetOperations(active=True),
         'pending_backups': self.GetPendingBackups(),
         'backups': self.GetBackups(),
-        'map_reduce_path': utils.config.MAPREDUCE_PATH + '/detail'
+        'map_reduce_path': config.MAPREDUCE_PATH + '/detail'
     }
     utils.RenderToResponse(self, 'list_actions.html', template_params)
 
@@ -193,54 +202,110 @@
   def post(self):
     self.RouteAction(GET_ACTIONS)
 
-  def GetKinds(self, all_ns=True):
+  def GetKinds(self, all_ns=True, deadline=40):
     """Obtain a list of all kind names from the datastore.
 
     Args:
       all_ns: If true, list kind names for all namespaces.
               If false, list kind names only for the current namespace.
+      deadline: maximum number of seconds to spend getting kinds.
 
     Returns:
-      An alphabetized list of kinds for the specified namespace(s).
+      kinds: an alphabetized list of kinds for the specified namespace(s).
+      more_kinds: a boolean indicating whether there may be additional kinds
+          not included in 'kinds' (e.g. because the query deadline was reached).
     """
     if all_ns:
-      result = self.GetKindsForAllNamespaces()
+      kinds, more_kinds = self.GetKindsForAllNamespaces(deadline)
     else:
-      result = self.GetKindsForCurrentNamespace()
-    return result
+      kinds, more_kinds = self.GetKindsForCurrentNamespace(deadline)
+    return kinds, more_kinds
 
-  def GetKindsForAllNamespaces(self):
-    """Obtain a list of all kind names from the datastore, *regardless*
-    of namespace.  The result is alphabetized and deduped."""
+  def GetKindsForAllNamespaces(self, deadline):
+    """Obtain a list of all kind names from the datastore.
 
+    Pulls kinds from all namespaces. The result is deduped and alphabetized.
 
-    namespace_list = [ns.namespace_name
-                      for ns in metadata.Namespace.all().run(limit=99999999)]
-    kind_itr_list = [metadata.Kind.all(namespace=ns).run(limit=99999999,
-                                                         batch_size=99999999)
-                     for ns in namespace_list]
+    Args:
+      deadline: maximum number of seconds to spend getting kinds.
 
-
+    Returns:
+      kinds: an alphabetized list of kinds for the specified namespace(s).
+      more_kinds: a boolean indicating whether there may be additional kinds
+          not included in 'kinds' (e.g. because the query deadline was reached).
+    """
+    start = time.time()
     kind_name_set = set()
-    for kind_itr in kind_itr_list:
-      for kind in kind_itr:
+
+    def ReadFromKindIters(kind_iter_list):
+      """Read kinds from a list of iterators.
+
+      Reads a kind from each iterator in kind_iter_list, adds it to
+      kind_name_set, and removes any completed iterators.
+
+      Args:
+        kind_iter_list: a list of iterators of kinds.
+      """
+      completed = []
+      for kind_iter in kind_iter_list:
+        try:
+          kind_name = kind_iter.next().kind_name
+          if utils.IsKindNameVisible(kind_name):
+            kind_name_set.add(kind_name)
+        except StopIteration:
+          completed.append(kind_iter)
+      for kind_iter in completed:
+        kind_iter_list.remove(kind_iter)
+
+    more_kinds = False
+    try:
+      namespace_iter = metadata.Namespace.all().run(batch_size=1000,
+                                                    deadline=deadline)
+      kind_iter_list = []
+      for ns in namespace_iter:
+
+
+        remaining = deadline - (time.time() - start)
+
+        if remaining <= 0:
+          raise datastore_errors.Timeout
+        kind_iter_list.append(metadata.Kind.all(namespace=ns.namespace_name)
+                              .run(batch_size=1000, deadline=remaining))
+        while len(kind_iter_list) == MAX_RPCS:
+          ReadFromKindIters(kind_iter_list)
+      while kind_iter_list:
+        ReadFromKindIters(kind_iter_list)
+    except datastore_errors.Timeout:
+      more_kinds = True
+      logging.warning('Failed to retrieve all kinds within deadline.')
+    return sorted(kind_name_set), more_kinds
+
+  def GetKindsForCurrentNamespace(self, deadline):
+    """Obtain a list of all kind names from the datastore.
+
+    Pulls kinds from the current namespace only. The result is alphabetized.
+
+    Args:
+      deadline: maximum number of seconds to spend getting kinds.
+
+    Returns:
+      kinds: an alphabetized list of kinds for the specified namespace(s).
+      more_kinds: a boolean indicating whether there may be additional kinds
+          not included in 'kinds' (e.g. because the query limit was reached).
+    """
+    more_kinds = False
+    kind_names = []
+    try:
+      kinds = metadata.Kind.all().order('__key__').run(batch_size=1000,
+                                                       deadline=deadline)
+      for kind in kinds:
         kind_name = kind.kind_name
         if utils.IsKindNameVisible(kind_name):
-          kind_name_set.add(kind.kind_name)
-
-    kind_name_list = sorted(kind_name_set)
-    return kind_name_list
-
-  def GetKindsForCurrentNamespace(self):
-    """Obtain a list of all kind names from the datastore for the
-    current namespace.  The result is alphabetized."""
-    kinds = metadata.Kind.all().order('__key__').fetch(99999999)
-    kind_names = []
-    for kind in kinds:
-      kind_name = kind.kind_name
-      if utils.IsKindNameVisible(kind_name):
-        kind_names.append(kind_name)
-    return kind_names
+          kind_names.append(kind_name)
+    except datastore_errors.Timeout:
+      more_kinds = True
+      logging.warning('Failed to retrieve all kinds within deadline.')
+    return kind_names, more_kinds
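
The kind-listing rewrite above caps concurrent metadata queries at MAX_RPCS
and drains them round-robin, one result per iterator per pass, so a single
huge namespace cannot starve the others before the deadline. The core loop,
reduced to a hedged standalone sketch:

    def drain_round_robin(iter_list, collect):
      # One item from each live iterator per pass; exhausted iterators
      # are dropped so the list shrinks as sources finish.
      while iter_list:
        completed = []
        for it in iter_list:
          try:
            collect(it.next())   # Python 2 iterator protocol
          except StopIteration:
            completed.append(it)
        for it in completed:
          iter_list.remove(it)
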
 
   def GetOperations(self, active=False, limit=100):
     """Obtain a list of operation, ordered by last_updated."""
@@ -295,7 +360,7 @@
   }
 
   def get(self):
-    relative_path = self.request.path.split(utils.config.BASE_PATH + '/')[1]
+    relative_path = self.request.path.split(config.BASE_PATH + '/')[1]
     if relative_path not in self._RESOURCE_MAP:
       self.response.set_status(404)
       self.response.out.write('Resource not found.')
@@ -334,22 +399,20 @@
     an instance of webapp.WSGIApplication with all mapreduce handlers
     registered.
   """
-  return webapp.WSGIApplication([
-      (r'%s/%s' % (utils.config.BASE_PATH,
-                   delete_handler.ConfirmDeleteHandler.SUFFIX),
-       delete_handler.ConfirmDeleteHandler),
-      (r'%s/%s' % (utils.config.BASE_PATH,
-                   delete_handler.DoDeleteHandler.SUFFIX),
-       delete_handler.DoDeleteHandler),
-      (r'%s/%s' % (utils.config.BASE_PATH,
-                   utils.MapreduceDoneHandler.SUFFIX),
-       utils.MapreduceDoneHandler),
-      (utils.config.DEFERRED_PATH, deferred.TaskHandler)]
-      + copy_handler.handlers_list(utils.config.BASE_PATH)
-      + backup_handler.handlers_list(utils.config.BASE_PATH)
-      + [(r'%s/static.*' % utils.config.BASE_PATH, StaticResourceHandler),
-         (r'/_ah/login_required', LoginRequiredHandler),
-         (r'.*', RouteByActionHandler)])
+  return webapp.WSGIApplication(
+      backup_handler.handlers_list(config.BASE_PATH) +
+      copy_handler.handlers_list(config.BASE_PATH) +
+      [(r'%s/%s' % (config.BASE_PATH,
+                    delete_handler.ConfirmDeleteHandler.SUFFIX),
+        delete_handler.ConfirmDeleteHandler),
+       (r'%s/%s' % (config.BASE_PATH, delete_handler.DoDeleteHandler.SUFFIX),
+        delete_handler.DoDeleteHandler),
+       (r'%s/%s' % (config.BASE_PATH, utils.MapreduceDoneHandler.SUFFIX),
+        utils.MapreduceDoneHandler),
+       (config.DEFERRED_PATH, deferred.TaskHandler),
+       (r'%s/static.*' % config.BASE_PATH, StaticResourceHandler),
+       (r'/_ah/login_required', LoginRequiredHandler),
+       (r'.*', RouteByActionHandler)])
 
 
 APP = CreateApplication()
diff --git a/google/appengine/ext/datastore_admin/static/js/compiled.js b/google/appengine/ext/datastore_admin/static/js/compiled.js
index 60a9dea..9d71f3a 100644
--- a/google/appengine/ext/datastore_admin/static/js/compiled.js
+++ b/google/appengine/ext/datastore_admin/static/js/compiled.js
@@ -1,19 +1,20 @@
-var h=document,m=Array,p=Error,q=parseInt,aa=String;function ba(a,b){return a.currentTarget=b}function ca(a,b){return a.keyCode=b}function da(a,b){return a.length=b}function r(a,b){return a.disabled=b}
-var t="push",u="shift",v="slice",w="replace",x="value",ea="preventDefault",y="indexOf",z="keyCode",B="type",fa="name",C="length",ga="propertyIsEnumerable",D="prototype",E="split",ha="target",F="call",G,H=this,I=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof m)return"array";if(a instanceof Object)return b;var c=Object[D].toString[F](a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a[C]&&"undefined"!=typeof a.splice&&"undefined"!=typeof a[ga]&&!a[ga]("splice"))return"array";
-if("[object Function]"==c||"undefined"!=typeof a[F]&&"undefined"!=typeof a[ga]&&!a[ga]("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a[F])return"object";return b},ia=function(a){var b=I(a);return"array"==b||"object"==b&&"number"==typeof a[C]},J=function(a){return"string"==typeof a},ja=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},K="closure_uid_"+(1E9*Math.random()>>>0),ka=0,la=function(a,b){function c(){}c.prototype=b[D];a.u=b[D];a.prototype=
-new c};var L=function(a){p.captureStackTrace?p.captureStackTrace(this,L):this.stack=p().stack||"";a&&(this.message=aa(a))};la(L,p);L[D].name="CustomError";var ma=function(a,b){for(var c=a[E]("%s"),d="",f=m[D][v][F](arguments,1);f[C]&&1<c[C];)d+=c[u]()+f[u]();return d+c.join("%s")},sa=function(a,b){if(b)return a[w](na,"&amp;")[w](oa,"&lt;")[w](pa,"&gt;")[w](qa,"&quot;");if(!ra.test(a))return a;-1!=a[y]("&")&&(a=a[w](na,"&amp;"));-1!=a[y]("<")&&(a=a[w](oa,"&lt;"));-1!=a[y](">")&&(a=a[w](pa,"&gt;"));-1!=a[y]('"')&&(a=a[w](qa,"&quot;"));return a},na=/&/g,oa=/</g,pa=/>/g,qa=/\"/g,ra=/[&<>\"]/;var ta=function(a,b){b.unshift(a);L[F](this,ma.apply(null,b));b[u]()};la(ta,L);ta[D].name="AssertionError";var M=function(a,b,c){if(!a){var d=m[D][v][F](arguments,2),f="Assertion failed";if(b)var f=f+(": "+b),e=d;throw new ta(""+f,e||[]);}return a};var N=m[D],ua=N[y]?function(a,b,c){M(null!=a[C]);return N[y][F](a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a[C]+c):c;if(J(a))return J(b)&&1==b[C]?a[y](b,c):-1;for(;c<a[C];c++)if(c in a&&a[c]===b)return c;return-1},va=N.forEach?function(a,b,c){M(null!=a[C]);N.forEach[F](a,b,c)}:function(a,b,c){for(var d=a[C],f=J(a)?a[E](""):a,e=0;e<d;e++)e in f&&b[F](c,f[e],e,a)},wa=function(a){var b=a[C];if(0<b){for(var c=m(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},xa=function(a,b,c){M(null!=a[C]);return 2>=
-arguments[C]?N[v][F](a,b):N[v][F](a,b,c)};var ya=function(a,b,c){for(var d in a)b[F](c,a[d],d,a)},za="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Aa=function(a,b){for(var c,d,f=1;f<arguments[C];f++){d=arguments[f];for(c in d)a[c]=d[c];for(var e=0;e<za[C];e++)c=za[e],Object[D].hasOwnProperty[F](d,c)&&(a[c]=d[c])}};var O,Ba,Ca,Da,Ea=function(){return H.navigator?H.navigator.userAgent:null};Da=Ca=Ba=O=!1;var Fa;if(Fa=Ea()){var Ga=H.navigator;O=0==Fa[y]("Opera");Ba=!O&&-1!=Fa[y]("MSIE");Ca=!O&&-1!=Fa[y]("WebKit");Da=!O&&!Ca&&"Gecko"==Ga.product}var Ha=O,Q=Ba,R=Da,S=Ca,Ia=function(){var a=H.document;return a?a.documentMode:void 0},Ja;
-t:{var Ka="",T;if(Ha&&H.opera)var La=H.opera.version,Ka="function"==typeof La?La():La;else if(R?T=/rv\:([^\);]+)(\)|;)/:Q?T=/MSIE\s+([^\);]+)(\)|;)/:S&&(T=/WebKit\/(\S+)/),T)var Ma=T.exec(Ea()),Ka=Ma?Ma[1]:"";if(Q){var Na=Ia();if(Na>parseFloat(Ka)){Ja=aa(Na);break t}}Ja=Ka}
-var Oa=Ja,Pa={},U=function(a){var b;if(!(b=Pa[a])){b=0;for(var c=aa(Oa)[w](/^[\s\xa0]+|[\s\xa0]+$/g,"")[E]("."),d=aa(a)[w](/^[\s\xa0]+|[\s\xa0]+$/g,"")[E]("."),f=Math.max(c[C],d[C]),e=0;0==b&&e<f;e++){var g=c[e]||"",l=d[e]||"",k=RegExp("(\\d*)(\\D*)","g"),P=RegExp("(\\d*)(\\D*)","g");do{var n=k.exec(g)||["","",""],s=P.exec(l)||["","",""];if(0==n[0][C]&&0==s[0][C])break;b=((0==n[1][C]?0:q(n[1],10))<(0==s[1][C]?0:q(s[1],10))?-1:(0==n[1][C]?0:q(n[1],10))>(0==s[1][C]?0:q(s[1],10))?1:0)||((0==n[2][C])<
-(0==s[2][C])?-1:(0==n[2][C])>(0==s[2][C])?1:0)||(n[2]<s[2]?-1:n[2]>s[2]?1:0)}while(0==b)}b=Pa[a]=0<=b}return b},Qa=H.document,Ra=Qa&&Q?Ia()||("CSS1Compat"==Qa.compatMode?q(Oa,10):5):void 0;var Sa=!Q||Q&&9<=Ra;!R&&!Q||Q&&Q&&9<=Ra||R&&U("1.9.1");Q&&U("9");var Ta=function(a,b){var c;c=a.className;c=J(c)&&c.match(/\S+/g)||[];for(var d=xa(arguments,1),f=c[C]+d[C],e=c,g=0;g<d[C];g++)0<=ua(e,d[g])||e[t](d[g]);a.className=c.join(" ");return c[C]==f};var V=function(a){return J(a)?h.getElementById(a):a},Ua=function(a,b,c,d){a=d||a;b=b&&"*"!=b?b.toUpperCase():"";if(a.querySelectorAll&&a.querySelector&&(b||c))return a.querySelectorAll(b+(c?"."+c:""));if(c&&a.getElementsByClassName){a=a.getElementsByClassName(c);if(b){d={};for(var f=0,e=0,g;g=a[e];e++)b==g.nodeName&&(d[f++]=g);da(d,f);return d}return a}a=a.getElementsByTagName(b||"*");if(c){d={};for(e=f=0;g=a[e];e++)b=g.className,"function"==typeof b[E]&&0<=ua(b[E](/\s+/),c)&&(d[f++]=g);da(d,f);return d}return a},
-Wa=function(a,b){ya(b,function(b,d){"style"==d?a.style.cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in Va?a.setAttribute(Va[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},Va={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",usemap:"useMap",valign:"vAlign",width:"width"},Ya=function(a,b,c){var d=arguments,f=d[0],e=
-d[1];if(!Sa&&e&&(e[fa]||e[B])){f=["<",f];e[fa]&&f[t](' name="',sa(e[fa]),'"');if(e[B]){f[t](' type="',sa(e[B]),'"');var g={};Aa(g,e);delete g[B];e=g}f[t](">");f=f.join("")}f=h.createElement(f);e&&(J(e)?f.className=e:"array"==I(e)?Ta.apply(null,[f].concat(e)):Wa(f,e));2<d[C]&&Xa(h,f,d,2);return f},Xa=function(a,b,c,d){function f(c){c&&b.appendChild(J(c)?a.createTextNode(c):c)}for(;d<c[C];d++){var e=c[d];if(!ia(e)||ja(e)&&0<e.nodeType)f(e);else{var g;t:{if(e&&"number"==typeof e[C]){if(ja(e)){g="function"==
-typeof e.item||"string"==typeof e.item;break t}if("function"==I(e)){g="function"==typeof e.item;break t}}g=!1}va(g?wa(e):e,f)}}};var Za=function(a){var b=a[B];if(void 0===b)return null;switch(b.toLowerCase()){case "checkbox":case "radio":return a.checked?a[x]:null;case "select-one":return b=a.selectedIndex,0<=b?a.options[b][x]:null;case "select-multiple":for(var b=[],c,d=0;c=a.options[d];d++)c.selected&&b[t](c[x]);return b[C]?b:null;default:return void 0!==a[x]?a[x]:null}};var $a=function(a){$a[" "](a);return a};$a[" "]=function(){};var ab=!Q||Q&&9<=Ra,bb=Q&&!U("9");!S||U("528");R&&U("1.9b")||Q&&U("8")||Ha&&U("9.5")||S&&U("528");R&&!U("8")||Q&&U("9");var W=function(a,b){this.type=a;this.target=b;ba(this,this[ha])};W[D].m=!1;W[D].defaultPrevented=!1;W[D].preventDefault=function(){this.defaultPrevented=!0};var cb=function(a,b){a&&this.p(a,b)};la(cb,W);G=cb[D];G.target=null;G.relatedTarget=null;G.offsetX=0;G.offsetY=0;G.clientX=0;G.clientY=0;G.screenX=0;G.screenY=0;G.button=0;ca(G,0);G.charCode=0;G.ctrlKey=!1;G.altKey=!1;G.shiftKey=!1;G.metaKey=!1;G.q=null;
-G.p=function(a,b){var c=this.type=a[B];W[F](this,c);this.target=a[ha]||a.srcElement;ba(this,b);var d=a.relatedTarget;if(d){if(R){var f;t:{try{$a(d.nodeName);f=!0;break t}catch(e){}f=!1}f||(d=null)}}else"mouseover"==c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=S||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=S||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=
-a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;ca(this,a[z]||0);this.charCode=a.charCode||("keypress"==c?a[z]:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.state=a.state;this.q=a;a.defaultPrevented&&this[ea]();delete this.m};G.preventDefault=function(){cb.u[ea][F](this);var a=this.q;if(a[ea])a[ea]();else if(a.returnValue=!1,bb)try{(a.ctrlKey||112<=a[z]&&123>=a[z])&&ca(a,-1)}catch(b){}};var db="closure_listenable_"+(1E6*Math.random()|0),eb=0;var fb=function(a,b,c,d,f,e){this.e=a;this.o=b;this.src=c;this.type=d;this.capture=!!f;this.k=e;this.key=++eb;this.d=this.l=!1};fb[D].r=function(){this.d=!0;this.k=this.src=this.o=this.e=null};var gb={},X={},Y={},Z={},hb=function(a,b,c,d,f){if("array"==I(b)){for(var e=0;e<b[C];e++)hb(a,b[e],c,d,f);return null}c=ib(c);if(a&&a[db])a=a.v(b,c,d,f);else t:{if(!b)throw p("Invalid event type");d=!!d;var g=X;b in g||(g[b]={a:0,b:0});g=g[b];d in g||(g[d]={a:0,b:0},g.a++);var g=g[d],e=a[K]||(a[K]=++ka),l;g.b++;if(g[e]){l=g[e];for(var k=0;k<l[C];k++)if(g=l[k],g.e==c&&g.k==f){if(g.d)break;l[k].l=!1;a=l[k];break t}}else l=g[e]=[],g.a++;k=jb();g=new fb(c,k,a,b,d,f);g.l=!1;k.src=a;k.e=g;l[t](g);Y[e]||
-(Y[e]=[]);Y[e][t](g);a.addEventListener?a.addEventListener(b,k,d):a.attachEvent(b in Z?Z[b]:Z[b]="on"+b,k);a=gb[g.key]=g}return a},jb=function(){var a=kb,b=ab?function(c){return a[F](b.src,b.e,c)}:function(c){c=a[F](b.src,b.e,c);if(!c)return c};return b},lb=function(a,b,c,d){if(!d.j&&d.n){for(var f=0,e=0;f<d[C];f++)d[f].d||(f!=e&&(d[e]=d[f]),e++);da(d,e);d.n=!1;0==e&&(delete X[a][b][c],X[a][b].a--,0==X[a][b].a&&(delete X[a][b],X[a].a--),0==X[a].a&&delete X[a])}},nb=function(a,b,c,d,f){var e=1;b=b[K]||
-(b[K]=++ka);if(a[b]){var g=--a.b,l=a[b];l.j?l.j++:l.j=1;try{for(var k=l[C],P=0;P<k;P++){var n=l[P];n&&!n.d&&(e&=!1!==mb(n,f))}}finally{a.b=Math.max(g,a.b),l.j--,lb(c,d,b,l)}}return Boolean(e)},mb=function(a,b){var c=a.e,d=a.k||a.src;if(a.l&&"number"!=typeof a&&a&&!a.d){var f=a.src;if(f&&f[db])f.w(a);else{var e=a[B],g=a.o,l=a.capture;f.removeEventListener?f.removeEventListener(e,g,l):f.detachEvent&&f.detachEvent(e in Z?Z[e]:Z[e]="on"+e,g);f=f[K]||(f[K]=++ka);if(Y[f]){var g=Y[f],k=ua(g,a);0<=k&&(M(null!=
-g[C]),N.splice[F](g,k,1));0==g[C]&&delete Y[f]}a.r();if(g=X[e][l][f])g.n=!0,lb(e,l,f,g);delete gb[a.key]}}return c[F](d,b)},kb=function(a,b){if(a.d)return!0;var c=a[B],d=X;if(!(c in d))return!0;var d=d[c],f,e;if(!ab){var g;if(!(g=b))t:{g=["window","event"];for(var l=H;f=g[u]();)if(null!=l[f])l=l[f];else{g=null;break t}g=l}f=g;g=!0 in d;l=!1 in d;if(g){if(0>f[z]||void 0!=f.returnValue)return!0;t:{var k=!1;if(0==f[z])try{ca(f,-1);break t}catch(P){k=!0}if(k||void 0==f.returnValue)f.returnValue=!0}}k=
-new cb;k.p(f,this);f=!0;try{if(g){for(var n=[],s=k.currentTarget;s;s=s.parentNode)n[t](s);e=d[!0];e.b=e.a;for(var A=n[C]-1;!k.m&&0<=A&&e.b;A--)ba(k,n[A]),f&=nb(e,n[A],c,!0,k);if(l)for(e=d[!1],e.b=e.a,A=0;!k.m&&A<n[C]&&e.b;A++)ba(k,n[A]),f&=nb(e,n[A],c,!1,k)}else f=mb(a,k)}finally{n&&da(n,0)}return f}c=new cb(b,this);return f=mb(a,c)},ob="__closure_events_fn_"+(1E9*Math.random()>>>0),ib=function(a){M(a,"Listener can not be null.");if("function"==I(a))return a;M(a.handleEvent,"An object listener must have handleEvent method.");
-return a[ob]||(a[ob]=function(b){return a.handleEvent(b)})};var pb=function(a,b){var c=[];1<arguments[C]&&(c=m[D][v][F](arguments)[v](1));var d=Ua(h,"th","tct-selectall",a);if(0!=d[C]){var d=d[0],f=0,e=Ua(h,"tbody",null,a);e[C]&&(f=e[0].rows[C]);this.f=Ya("input",{type:"checkbox"});d.appendChild(this.f);f?hb(this.f,"click",this.t,!1,this):r(this.f,!0);this.g=[];this.h=[];this.i=[];d=Ua(h,"input",null,a);for(f=0;e=d[f];f++)"checkbox"==e[B]&&e!=this.f?(this.g[t](e),hb(e,"click",this.s,!1,this)):"action"==e[fa]&&(0<=c[y](e[x])?this.i[t](e):this.h[t](e),r(e,!0))}};
-G=pb[D];G.g=null;G.c=0;G.f=null;G.h=null;G.i=null;G.t=function(a){for(var b=a[ha].checked,c=a=0,d;d=this.g[c];c++)d.checked=b,a+=1;this.c=b?this.g[C]:0;for(c=0;b=this.h[c];c++)r(b,!this.c);for(c=0;b=this.i[c];c++)r(b,1!=a?!0:!1)};G.s=function(a){this.c+=a[ha].checked?1:-1;this.f.checked=this.c==this.g[C];a=0;for(var b;b=this.h[a];a++)r(b,!this.c);for(a=0;b=this.i[a];a++)r(b,1!=this.c?!0:!1)};var qb=function(){var a=V("kinds");a&&new pb(a);(a=V("pending_backups"))&&new pb(a);(a=V("backups"))&&new pb(a,"Restore");var b=V("ae-datastore-admin-filesystem");b&&hb(b,"change",function(){var a="gs"==Za(b);V("gs_bucket_tr").style.display=a?"":"none"})},rb=["ae","Datastore","Admin","init"],$=H;rb[0]in $||!$.execScript||$.execScript("var "+rb[0]);for(var sb;rb[C]&&(sb=rb[u]());)rb[C]||void 0===qb?$=$[sb]?$[sb]:$[sb]={}:$[sb]=qb;
+var h=document,k=Array,n=Error,q=parseInt,r=String;function aa(a,b){return a.currentTarget=b}function ba(a,b){return a.keyCode=b}function s(a,b){return a.disabled=b}
+var u="push",v="shift",w="slice",x="replace",y="value",ca="preventDefault",z="indexOf",A="keyCode",C="type",da="name",D="length",ea="propertyIsEnumerable",E="prototype",fa="checked",F="split",G="style",ga="target",H="call",ha="apply",I,J=this,K=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof k)return"array";if(a instanceof Object)return b;var c=Object[E].toString[H](a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a[D]&&"undefined"!=typeof a.splice&&
+"undefined"!=typeof a[ea]&&!a[ea]("splice"))return"array";if("[object Function]"==c||"undefined"!=typeof a[H]&&"undefined"!=typeof a[ea]&&!a[ea]("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a[H])return"object";return b},ia=function(a){var b=K(a);return"array"==b||"object"==b&&"number"==typeof a[D]},L=function(a){return"string"==typeof a},ja=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},M="closure_uid_"+(1E9*Math.random()>>>0),ka=0,la=
+function(a,b){var c=k[E][w][H](arguments,1);return function(){var b=k[E][w][H](arguments);b.unshift[ha](b,c);return a[ha](this,b)}},ma=function(a,b){function c(){}c.prototype=b[E];a.q=b[E];a.prototype=new c};var N=function(a){n.captureStackTrace?n.captureStackTrace(this,N):this.stack=n().stack||"";a&&(this.message=r(a))};ma(N,n);N[E].name="CustomError";var na=function(a,b){for(var c=a[F]("%s"),d="",f=k[E][w][H](arguments,1);f[D]&&1<c[D];)d+=c[v]()+f[v]();return d+c.join("%s")},ta=function(a,b){if(b)return a[x](oa,"&amp;")[x](pa,"&lt;")[x](qa,"&gt;")[x](ra,"&quot;");if(!sa.test(a))return a;-1!=a[z]("&")&&(a=a[x](oa,"&amp;"));-1!=a[z]("<")&&(a=a[x](pa,"&lt;"));-1!=a[z](">")&&(a=a[x](qa,"&gt;"));-1!=a[z]('"')&&(a=a[x](ra,"&quot;"));return a},oa=/&/g,pa=/</g,qa=/>/g,ra=/\"/g,sa=/[&<>\"]/,ua=function(a){return r(a)[x](/\-([a-z])/g,function(a,c){return c.toUpperCase()})},
+va=function(a,b){var c=L(b)?r(b)[x](/([-()\[\]{}+?*.$\^|,:#<!\\])/g,"\\$1")[x](/\x08/g,"\\x08"):"\\s";return a[x](RegExp("(^"+(c?"|["+c+"]+":"")+")([a-z])","g"),function(a,b,c){return b+c.toUpperCase()})};var wa=function(a,b){b.unshift(a);N[H](this,na[ha](null,b));b[v]()};ma(wa,N);wa[E].name="AssertionError";var O=function(a,b,c){if(!a){var d=k[E][w][H](arguments,2),f="Assertion failed";if(b)var f=f+(": "+b),e=d;throw new wa(""+f,e||[]);}return a};var P=k[E],xa=P[z]?function(a,b,c){O(null!=a[D]);return P[z][H](a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a[D]+c):c;if(L(a))return L(b)&&1==b[D]?a[z](b,c):-1;for(;c<a[D];c++)if(c in a&&a[c]===b)return c;return-1},ya=P.forEach?function(a,b,c){O(null!=a[D]);P.forEach[H](a,b,c)}:function(a,b,c){for(var d=a[D],f=L(a)?a[F](""):a,e=0;e<d;e++)e in f&&b[H](c,f[e],e,a)},za=function(a,b){var c=xa(a,b),d;if(d=0<=c)O(null!=a[D]),P.splice[H](a,c,1);return d},Aa=function(a){var b=a[D];if(0<b){for(var c=
+k(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},Ba=function(a,b,c){O(null!=a[D]);return 2>=arguments[D]?P[w][H](a,b):P[w][H](a,b,c)};var Ca=function(a,b,c){for(var d in a)b[H](c,a[d],d,a)},Da="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ea=function(a,b){for(var c,d,f=1;f<arguments[D];f++){d=arguments[f];for(c in d)a[c]=d[c];for(var e=0;e<Da[D];e++)c=Da[e],Object[E].hasOwnProperty[H](d,c)&&(a[c]=d[c])}};var Q,Fa,Ga,Ha,Ia=function(){return J.navigator?J.navigator.userAgent:null};Ha=Ga=Fa=Q=!1;var R;if(R=Ia()){var Ja=J.navigator;Q=0==R.lastIndexOf("Opera",0);Fa=!Q&&(-1!=R[z]("MSIE")||-1!=R[z]("Trident"));Ga=!Q&&-1!=R[z]("WebKit");Ha=!Q&&!Ga&&!Fa&&"Gecko"==Ja.product}var Ka=Q,S=Fa,T=Ha,U=Ga,La=function(){var a=J.document;return a?a.documentMode:void 0},Ma;
+t:{var Na="",Oa;if(Ka&&J.opera)var Pa=J.opera.version,Na="function"==typeof Pa?Pa():Pa;else if(T?Oa=/rv\:([^\);]+)(\)|;)/:S?Oa=/\b(?:MSIE|rv)\s+([^\);]+)(\)|;)/:U&&(Oa=/WebKit\/(\S+)/),Oa)var Qa=Oa.exec(Ia()),Na=Qa?Qa[1]:"";if(S){var Ra=La();if(Ra>parseFloat(Na)){Ma=r(Ra);break t}}Ma=Na}
+var Sa=Ma,Ta={},V=function(a){var b;if(!(b=Ta[a])){b=0;for(var c=r(Sa)[x](/^[\s\xa0]+|[\s\xa0]+$/g,"")[F]("."),d=r(a)[x](/^[\s\xa0]+|[\s\xa0]+$/g,"")[F]("."),f=Math.max(c[D],d[D]),e=0;0==b&&e<f;e++){var g=c[e]||"",m=d[e]||"",l=RegExp("(\\d*)(\\D*)","g"),Ua=RegExp("(\\d*)(\\D*)","g");do{var p=l.exec(g)||["","",""],t=Ua.exec(m)||["","",""];if(0==p[0][D]&&0==t[0][D])break;b=((0==p[1][D]?0:q(p[1],10))<(0==t[1][D]?0:q(t[1],10))?-1:(0==p[1][D]?0:q(p[1],10))>(0==t[1][D]?0:q(t[1],10))?1:0)||((0==p[2][D])<
+(0==t[2][D])?-1:(0==p[2][D])>(0==t[2][D])?1:0)||(p[2]<t[2]?-1:p[2]>t[2]?1:0)}while(0==b)}b=Ta[a]=0<=b}return b},Va=J.document,Wa=Va&&S?La()||("CSS1Compat"==Va.compatMode?q(Sa,10):5):void 0;var Xa=!S||S&&9<=Wa;!T&&!S||S&&S&&9<=Wa||T&&V("1.9.1");S&&V("9");var Ya=function(a,b){var c;c=a.className;c=L(c)&&c.match(/\S+/g)||[];for(var d=Ba(arguments,1),f=c[D]+d[D],e=c,g=0;g<d[D];g++)0<=xa(e,d[g])||e[u](d[g]);a.className=c.join(" ");return c[D]==f};var W=function(a){return L(a)?h.getElementById(a):a},Za=function(a,b,c,d){a=d||a;b=b&&"*"!=b?b.toUpperCase():"";if(a.querySelectorAll&&a.querySelector&&(b||c))return a.querySelectorAll(b+(c?"."+c:""));if(c&&a.getElementsByClassName){a=a.getElementsByClassName(c);if(b){d={};for(var f=0,e=0,g;g=a[e];e++)b==g.nodeName&&(d[f++]=g);d.length=f;return d}return a}a=a.getElementsByTagName(b||"*");if(c){d={};for(e=f=0;g=a[e];e++)b=g.className,"function"==typeof b[F]&&0<=xa(b[F](/\s+/),c)&&(d[f++]=g);d.length=
+f;return d}return a},ab=function(a,b){Ca(b,function(b,d){"style"==d?a[G].cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in $a?a.setAttribute($a[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},$a={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",usemap:"useMap",valign:"vAlign",width:"width"},cb=function(a,b,c){var d=arguments,
+f=d[0],e=d[1];if(!Xa&&e&&(e[da]||e[C])){f=["<",f];e[da]&&f[u](' name="',ta(e[da]),'"');if(e[C]){f[u](' type="',ta(e[C]),'"');var g={};Ea(g,e);delete g[C];e=g}f[u](">");f=f.join("")}f=h.createElement(f);e&&(L(e)?f.className=e:"array"==K(e)?Ya[ha](null,[f].concat(e)):ab(f,e));2<d[D]&&bb(h,f,d,2);return f},bb=function(a,b,c,d){function f(c){c&&b.appendChild(L(c)?a.createTextNode(c):c)}for(;d<c[D];d++){var e=c[d];if(!ia(e)||ja(e)&&0<e.nodeType)f(e);else{var g;t:{if(e&&"number"==typeof e[D]){if(ja(e)){g=
+"function"==typeof e.item||"string"==typeof e.item;break t}if("function"==K(e)){g="function"==typeof e.item;break t}}g=!1}ya(g?Aa(e):e,f)}}};var db=function(a){var b=a[C];if(void 0===b)return null;switch(b.toLowerCase()){case "checkbox":case "radio":return a[fa]?a[y]:null;case "select-one":return b=a.selectedIndex,0<=b?a.options[b][y]:null;case "select-multiple":for(var b=[],c,d=0;c=a.options[d];d++)c.selected&&b[u](c[y]);return b[D]?b:null;default:return void 0!==a[y]?a[y]:null}};var eb=function(a){eb[" "](a);return a};eb[" "]=function(){};var fb=!S||S&&9<=Wa,gb=S&&!V("9");!U||V("528");T&&V("1.9b")||S&&V("8")||Ka&&V("9.5")||U&&V("528");T&&!V("8")||S&&V("9");var hb=function(a,b){this.type=a;this.target=b;aa(this,this[ga])};hb[E].k=!1;hb[E].defaultPrevented=!1;hb[E].preventDefault=function(){this.defaultPrevented=!0};var ib=function(a,b){a&&this.r(a,b)};ma(ib,hb);I=ib[E];I.target=null;I.relatedTarget=null;I.offsetX=0;I.offsetY=0;I.clientX=0;I.clientY=0;I.screenX=0;I.screenY=0;I.button=0;ba(I,0);I.charCode=0;I.ctrlKey=!1;I.altKey=!1;I.shiftKey=!1;I.metaKey=!1;I.m=null;
+I.r=function(a,b){var c=this.type=a[C];hb[H](this,c);this.target=a[ga]||a.srcElement;aa(this,b);var d=a.relatedTarget;if(d){if(T){var f;t:{try{eb(d.nodeName);f=!0;break t}catch(e){}f=!1}f||(d=null)}}else"mouseover"==c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=U||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=U||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=
+a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;ba(this,a[A]||0);this.charCode=a.charCode||("keypress"==c?a[A]:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.state=a.state;this.m=a;a.defaultPrevented&&this[ca]();delete this.k};I.preventDefault=function(){ib.q[ca][H](this);var a=this.m;if(a[ca])a[ca]();else if(a.returnValue=!1,gb)try{(a.ctrlKey||112<=a[A]&&123>=a[A])&&ba(a,-1)}catch(b){}};var jb="closure_listenable_"+(1E6*Math.random()|0),kb=0;var lb=function(a,b,c,d,f,e){this.c=a;this.l=b;this.src=c;this.type=d;this.capture=!!f;this.i=e;this.key=++kb;this.e=this.j=!1};lb[E].n=function(){this.e=!0;this.i=this.src=this.l=this.c=null};var mb={},X={},Y={},Z={},nb=function(a,b,c,d,f){if("array"==K(b)){for(var e=0;e<b[D];e++)nb(a,b[e],c,d,f);return null}c=ob(c);if(a&&a[jb])a=a.t(b,c,d,f);else t:{if(!b)throw n("Invalid event type");d=!!d;var g=X;b in g||(g[b]={a:0});g=g[b];d in g||(g[d]={a:0},g.a++);var g=g[d],e=a[M]||(a[M]=++ka),m;if(g[e]){m=g[e];for(var l=0;l<m[D];l++)if(g=m[l],g.c==c&&g.i==f){if(g.e)break;m[l].j=!1;a=m[l];break t}}else m=g[e]=[],g.a++;l=pb();g=new lb(c,l,a,b,d,f);g.j=!1;l.src=a;l.c=g;m[u](g);Y[e]||(Y[e]=[]);Y[e][u](g);
+a.addEventListener?a.addEventListener(b,l,d):a.attachEvent(b in Z?Z[b]:Z[b]="on"+b,l);a=mb[g.key]=g}return a},pb=function(){var a=qb,b=fb?function(c){return a[H](b.src,b.c,c)}:function(c){c=a[H](b.src,b.c,c);if(!c)return c};return b},sb=function(a,b,c,d,f){c=1;b=b[M]||(b[M]=++ka);if(a[b])for(a=Aa(a[b]),b=0;b<a[D];b++)(d=a[b])&&!d.e&&(c&=!1!==rb(d,f));return Boolean(c)},rb=function(a,b){var c=a.c,d=a.i||a.src;if(a.j&&"number"!=typeof a&&a&&!a.e){var f=a.src;if(f&&f[jb])f.s(a);else{var e=a[C],g=a.l,
+m=a.capture;f.removeEventListener?f.removeEventListener(e,g,m):f.detachEvent&&f.detachEvent(e in Z?Z[e]:Z[e]="on"+e,g);f=f[M]||(f[M]=++ka);Y[f]&&(g=Y[f],za(g,a),0==g[D]&&delete Y[f]);a.n();if(g=X[e][m][f])za(g,a),0==g[D]&&(delete X[e][m][f],X[e][m].a--),0==X[e][m].a&&(delete X[e][m],X[e].a--),0==X[e].a&&delete X[e];delete mb[a.key]}}return c[H](d,b)},qb=function(a,b){if(a.e)return!0;var c=a[C],d=X;if(!(c in d))return!0;var d=d[c],f,e;if(!fb){var g;if(!(g=b))t:{g=["window","event"];for(var m=J;f=g[v]();)if(null!=
+m[f])m=m[f];else{g=null;break t}g=m}f=g;g=!0 in d;m=!1 in d;if(g){if(0>f[A]||void 0!=f.returnValue)return!0;t:{var l=!1;if(0==f[A])try{ba(f,-1);break t}catch(Ua){l=!0}if(l||void 0==f.returnValue)f.returnValue=!0}}l=new ib(f,this);f=!0;try{if(g){for(var p=[],t=l.currentTarget;t;t=t.parentNode)p[u](t);e=d[!0];for(var B=p[D]-1;!l.k&&0<=B;B--)aa(l,p[B]),f&=sb(e,p[B],c,!0,l);if(m)for(e=d[!1],B=0;!l.k&&B<p[D];B++)aa(l,p[B]),f&=sb(e,p[B],c,!1,l)}else f=rb(a,l)}finally{p&&(p.length=0)}return f}return rb(a,
+new ib(b,this))},tb="__closure_events_fn_"+(1E9*Math.random()>>>0),ob=function(a){O(a,"Listener can not be null.");if("function"==K(a))return a;O(a.handleEvent,"An object listener must have handleEvent method.");return a[tb]||(a[tb]=function(b){return a.handleEvent(b)})};var ub=function(a,b,c){var d;t:if(d=ua(c),void 0===a[G][d]&&(c=(U?"Webkit":T?"Moz":S?"ms":Ka?"O":null)+va(c),void 0!==a[G][c])){d=c;break t}d&&(a[G][d]=b)};var vb=function(a,b){var c=[];1<arguments[D]&&(c=k[E][w][H](arguments)[w](1));var d=Za(h,"th","tct-selectall",a);if(0!=d[D]){var d=d[0],f=0,e=Za(h,"tbody",null,a);e[D]&&(f=e[0].rows[D]);this.d=cb("input",{type:"checkbox"});d.appendChild(this.d);f?nb(this.d,"click",this.p,!1,this):s(this.d,!0);this.f=[];this.g=[];this.h=[];d=Za(h,"input",null,a);for(f=0;e=d[f];f++)"checkbox"==e[C]&&e!=this.d?(this.f[u](e),nb(e,"click",this.o,!1,this)):"action"==e[da]&&(0<=c[z](e[y])?this.h[u](e):this.g[u](e),s(e,!0))}};
+I=vb[E];I.f=null;I.b=0;I.d=null;I.g=null;I.h=null;I.p=function(a){for(var b=a[ga][fa],c=a=0,d;d=this.f[c];c++)d.checked=b,a+=1;this.b=b?this.f[D]:0;for(c=0;b=this.g[c];c++)s(b,!this.b);for(c=0;b=this.h[c];c++)s(b,1!=a?!0:!1)};I.o=function(a){this.b+=a[ga][fa]?1:-1;this.d.checked=this.b==this.f[D];a=0;for(var b;b=this.g[a];a++)s(b,!this.b);for(a=0;b=this.h[a];a++)s(b,1!=this.b?!0:!1)};var wb=function(){var a=W("kinds");a&&new vb(a);(a=W("pending_backups"))&&new vb(a);(a=W("backups"))&&new vb(a,"Restore");var b=W("ae-datastore-admin-filesystem");b&&nb(b,"change",function(){var a="gs"==db(b);W("gs_bucket_tr")[G].display=a?"":"none"});if(a=W("confirm_delete_form")){var c=W("confirm_readonly_delete");c&&(a.onsubmit=function(){var a=W("confirm_message");L("color")?ub(a,"red","color"):Ca("color",la(ub,a));return c[fa]})}},xb=["ae","Datastore","Admin","init"],$=J;
+xb[0]in $||!$.execScript||$.execScript("var "+xb[0]);for(var yb;xb[D]&&(yb=xb[v]());)xb[D]||void 0===wb?$=$[yb]?$[yb]:$[yb]={}:$[yb]=wb;
diff --git a/google/appengine/ext/datastore_admin/templates/backup_information.html b/google/appengine/ext/datastore_admin/templates/backup_information.html
index d4301c2..7afa1a4 100644
--- a/google/appengine/ext/datastore_admin/templates/backup_information.html
+++ b/google/appengine/ext/datastore_admin/templates/backup_information.html
@@ -25,7 +25,7 @@
     </table>
   </p>
   {% endfor %}
-  <form action="{{ back_target }}" method="post">
-    <input class="goog-button" type="submit" value="Back">
+  <form action="{{ datastore_admin_home }}" method="post">
+    <input class="goog-button" type="submit" value="Back to Datastore Admin">
   </form>
 {% endblock %}
diff --git a/google/appengine/ext/datastore_admin/templates/base.html b/google/appengine/ext/datastore_admin/templates/base.html
index 7465239..19185bf 100644
--- a/google/appengine/ext/datastore_admin/templates/base.html
+++ b/google/appengine/ext/datastore_admin/templates/base.html
@@ -8,7 +8,7 @@
   {% block head %}{% endblock %}
   <script src="{{base_path}}/static/js/compiled.js"></script>
 </head>
-<body>
+<body style="visibility: hidden;">
 <script type="text/javascript">
   {% comment %}
     The below section is used to detect whether or not we're inside of an
@@ -25,6 +25,23 @@
     document.write('<div id="ae-content" class="g-unit" style="padding-left: 0em;">')
     document.write('<div id="ae-datastore-admin-body" class="g-doc-1024" style="margin-left: 0px; min-width: 700px; width: 775px;">')
   }
+
+  {% comment %}
+    The section below is used to prevent clickjacking by ensuring that, if
+    this page is inside an iframe, the top parent frame is the admin console.
+    The check is skipped if the browser does not support ancestorOrigins or
+    there are no ancestorOrigins (this page is the top-level frame).
+  {% endcomment %}
+  var allowedOrigins = ['{{ admin_console_url }}'];
+  if (location && location.ancestorOrigins &&
+      location.ancestorOrigins.length > 0) {
+    var topOrigin = Array.prototype.pop.call(location.ancestorOrigins);
+    if (allowedOrigins.indexOf(topOrigin) > -1) {
+      document.body.style.visibility = "visible";
+    }
+  } else {
+    document.body.style.visibility = "visible";
+  }
 </script>
 <div id="bd">
 {% block body %}
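
The ancestorOrigins check added above is the client-side half of the
clickjacking defense; the server-side half is the X-FRAME-OPTIONS header
emitted in the utils.py hunk later in this diff. A minimal sketch of that
server-side piece, assuming a webapp request handler (the handler class below
is hypothetical; config.ADMIN_CONSOLE_URL is the setting this change
references):

    from google.appengine.ext import webapp
    from google.appengine.ext.datastore_admin import config

    class FramableAdminPage(webapp.RequestHandler):
      def get(self):
        # Ask the browser to refuse framing except from the admin console
        # origin; this complements the ancestorOrigins check in base.html.
        self.response.headers['X-FRAME-OPTIONS'] = ('ALLOW-FROM %s' %
                                                    config.ADMIN_CONSOLE_URL)
        self.response.out.write('<html><body>...</body></html>')
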
diff --git a/google/appengine/ext/datastore_admin/templates/confirm_abort_backup.html b/google/appengine/ext/datastore_admin/templates/confirm_abort_backup.html
index ce71c23..61b999d 100644
--- a/google/appengine/ext/datastore_admin/templates/confirm_abort_backup.html
+++ b/google/appengine/ext/datastore_admin/templates/confirm_abort_backup.html
@@ -14,10 +14,13 @@
       {% for backup in backups %}
         <input type="hidden" name="backup_id" value="{{backup.key}}">
       {% endfor %}
+      {% if run_as_a_service %}
+        <input type="hidden" name="run_as_a_service" value="{{ run_as_a_service|escape }}">
+      {% endif %}
       <input type="hidden" name="namespace" value="{{namespace|escape}}">
       <input type="hidden" name="xsrf_token" value="{{xsrf_token|escape}}">
       <input type="Submit" value="Yes">
-      <a href="{{ cancel_url|escape }}">No</a>
+      <a href="{{ datastore_admin_home }}">No</a>
     </form>
 
   {% else %}
diff --git a/google/appengine/ext/datastore_admin/templates/confirm_backup.html b/google/appengine/ext/datastore_admin/templates/confirm_backup.html
index 9e65be7..5c98f88 100644
--- a/google/appengine/ext/datastore_admin/templates/confirm_backup.html
+++ b/google/appengine/ext/datastore_admin/templates/confirm_backup.html
@@ -14,6 +14,9 @@
 
     <form class="ae-form" action="{{base_path}}/{{form_target}}" method="post"
         style="width:39.39em;">
+      {% if run_as_a_service %}
+        <input type="hidden" name="run_as_a_service" value="{{ run_as_a_service|escape }}">
+      {% endif %}
       {% for kind in kind_list %}
       <input type="hidden" name="kind" value="{{kind|escape}}">
       {% endfor %}
@@ -55,6 +58,8 @@
           {% endfor %}
         </select>
       </p>
+
+      {% if not run_as_a_service %}
       <p>Operation will be performed using the selected queue:
         {% if queues %}
         <select name="queue">
@@ -66,18 +71,21 @@
         <input type="text" name="queue" value="default"/>
         {% endif %}
       </p>
+      {% endif %}
 
       <table>
         <tr>
           <td>
             Backup storage destination:
             <select name="filesystem" id="ae-datastore-admin-filesystem">
+              {% if not run_as_a_service %}
               <option value="blobstore" selected="selected">Blobstore</option>
+              {% endif %}
               <option value="gs">Google Cloud Storage</option>
             </select>
           </td>
         </tr>
-        <tr style="display:none;" id="gs_bucket_tr">
+        <tr {% if not run_as_a_service %}style="display:none;"{% endif %} id="gs_bucket_tr">
           <td>
             Google Cloud Storage bucket name
             (<a target="_blank"
@@ -92,7 +100,7 @@
           <input class="goog-button" type="submit" name="backup"
               value="Backup Entities"></td>
         <td style="color: #666667;">
-          or <a href="{{ cancel_url|escape }}">Cancel</a></td>
+          or <a href="{{ datastore_admin_home }}">Cancel</a></td>
       </tr></table>
     </form>
   {% else %}
diff --git a/google/appengine/ext/datastore_admin/templates/confirm_backup_import.html b/google/appengine/ext/datastore_admin/templates/confirm_backup_import.html
index 1ef9427..3a331f8 100644
--- a/google/appengine/ext/datastore_admin/templates/confirm_backup_import.html
+++ b/google/appengine/ext/datastore_admin/templates/confirm_backup_import.html
@@ -3,12 +3,12 @@
 {% block body %}
   <h2>Datastore Admin: Import backup information</h2>
   {% if error %}
-    <div class="ae-errorbox">{{error}}</div>
-    <a href="{{ cancel_url|escape }}"><button>Back</button></a>
+    <div class="ae-errorbox">{{ error|escape }}</div>
+    <a href="{{ datastore_admin_home }}"><button>Back</button></a>
   {% else %}
     {% if not other_backup_info_files and not selected_backup_info_file %}
       <div class="ae-errorbox">No backup information files were found.</div>
-      <a href="{{ cancel_url|escape }}"><button>Back</button></a>
+      <a href="{{ datastore_admin_home }}"><button>Back</button></a>
     {% else %}
       <form action="{{base_path}}/{{form_target}}" method="post" style="width:39.39em;">
         <input type="hidden" name="xsrf_token" value="{{xsrf_token|escape}}">
@@ -30,9 +30,12 @@
             <input type="radio" name="gs_handle" value="{{backup_info_file}}">{{backup_info_file|escape}}</br>
           {% endfor %}
         {% endif %}
+        {% if run_as_a_service %}
+          <input type="hidden" name="run_as_a_service" value="{{ run_as_a_service|escape }}">
+        {% endif %}
         <input type="Submit" name="Import" value="Add to backup list">
         <input type="Submit" name="Restore" value="Restore from backup">
-        <a href="{{ cancel_url|escape }}">Cancel</a>
+        <a href="{{ datastore_admin_home }}">Cancel</a>
       </form>
     {% endif %}
   {% endif %}
diff --git a/google/appengine/ext/datastore_admin/templates/confirm_copy.html b/google/appengine/ext/datastore_admin/templates/confirm_copy.html
index 20ac44e..c7a4301 100644
--- a/google/appengine/ext/datastore_admin/templates/confirm_copy.html
+++ b/google/appengine/ext/datastore_admin/templates/confirm_copy.html
@@ -102,7 +102,7 @@
           <input class="goog-button" type="submit" name="copy"
               value="Copy Entities"></td>
         <td style="color: #666667;">
-          or <a href="{{ cancel_url|escape }}">Cancel</a></td>
+          or <a href="{{ datastore_admin_home }}">Cancel</a></td>
       </tr></table>
     </form>
   {% else %}
diff --git a/google/appengine/ext/datastore_admin/templates/confirm_delete.html b/google/appengine/ext/datastore_admin/templates/confirm_delete.html
index 3e3f56b..11e6adc 100644
--- a/google/appengine/ext/datastore_admin/templates/confirm_delete.html
+++ b/google/appengine/ext/datastore_admin/templates/confirm_delete.html
@@ -38,13 +38,14 @@
       entities?
       </div>
     </div>
-    <form action="{{base_path}}/{{form_target}}" method="post">
+    <form action="{{base_path}}/{{form_target}}" id="confirm_delete_form" method="post">
       {% for kind in kind_list %}
       <input type="hidden" name="kind" value="{{kind}}">
       {% endfor %}
-      <input type="hidden" name="namespace" value="{{namespace}}">
-      <input type="hidden" name="app_id" value="{{app_id}}">
+      <input type="hidden" name="namespace" value="{{ namespace|escape }}">
+      <input type="hidden" name="app_id" value="{{ app_id|escape }}">
       <input type="hidden" name="xsrf_token" value="{{xsrf_token}}">
+      <input type="hidden" name="readonly_warning" value="{{readonly_warning}}">
       <p>Operation will be performed using the selected queue:
         {% if queues %}
           <select name="queue">
@@ -56,12 +57,29 @@
           <input type="text" name="queue" value="default"/>
         {% endif %}
       </p>
+      {% if readonly_warning %}
+        <table style="padding-top: 0.5em;">
+         <tr>
+          <td style="padding-right: 0.5em;">
+           <input type="checkbox" id="confirm_readonly_delete" name="confirm_readonly_delete">
+          </td>
+          <td>
+           <div id="confirm_message">
+             <strong>
+              Datastore writes are currently disabled. Please confirm if you
+              still want to go ahead with deletion.
+             </strong>
+           </div>
+          </td>
+         </tr>
+        </table>
+      {% endif %}
       <table style="padding-top: 1em;"><tr>
         <td style="padding-right: 0.5em;">
           <input class="goog-button" type="submit" name="delete"
               value="Delete Entities"></td>
         <td style="color: #666667;">
-          or <a href="{{ cancel_url }}">Cancel</a></td>
+          or <a href="{{ datastore_admin_home }}">Cancel</a></td>
       </tr></table>
     </form>
   {% else %}
diff --git a/google/appengine/ext/datastore_admin/templates/confirm_delete_backup.html b/google/appengine/ext/datastore_admin/templates/confirm_delete_backup.html
index 9a312dd..0abd569 100644
--- a/google/appengine/ext/datastore_admin/templates/confirm_delete_backup.html
+++ b/google/appengine/ext/datastore_admin/templates/confirm_delete_backup.html
@@ -16,10 +16,13 @@
       {% for backup in backups %}
         <input type="hidden" name="backup_id" value="{{backup.key}}">
       {% endfor %}
+      {% if run_as_a_service %}
+        <input type="hidden" name="run_as_a_service" value="{{ run_as_a_service|escape }}">
+      {% endif %}
       <input type="hidden" name="namespace" value="{{namespace|escape}}">
       <input type="hidden" name="xsrf_token" value="{{xsrf_token|escape}}">
       <input type="Submit" value="Yes">
-      <a href="{{ cancel_url|escape }}">Cancel</a>
+      <a href="{{ datastore_admin_home }}">Cancel</a>
     </form>
 
   {% else %}
diff --git a/google/appengine/ext/datastore_admin/templates/confirm_restore_from_backup.html b/google/appengine/ext/datastore_admin/templates/confirm_restore_from_backup.html
index 0b7e638..efca183 100644
--- a/google/appengine/ext/datastore_admin/templates/confirm_restore_from_backup.html
+++ b/google/appengine/ext/datastore_admin/templates/confirm_restore_from_backup.html
@@ -7,9 +7,12 @@
     <br/>Restore does not delete any new entities added since the backup.
     However, it will replace existing entities with the backed-up version for the following kinds:
     <form action="{{base_path}}/{{form_target}}" method="post" style="width:39.39em;">
+      {% if run_as_a_service %}
+        <input type="hidden" name="run_as_a_service" value="{{ run_as_a_service|escape }}">
+      {% endif %}
       <input type="hidden" name="backup_id" value="{{backup.key}}">
       <input type="hidden" name="xsrf_token" value="{{xsrf_token|escape}}">
-      <input type="hidden" name="delete_backup_after_restore" value="{{delete_backup_after_restore}}">
+      <input type="hidden" name="delete_backup_after_restore" value="{{ delete_backup_after_restore|escape }}">
       <ul>
         {% for kind in backup.kinds %}
           <li><input type="checkbox" name="kind" value="{{kind}}" checked="checked"/>{{ kind }}</li>
@@ -62,7 +65,7 @@
         {% endif %}
       </p>
       <input type="submit" value="Restore">
-      <a href="{{ cancel_url|escape }}">Cancel</a>
+      <a href="{{ datastore_admin_home }}">Cancel</a>
     </form>
   {% else %}
     <div class="ae-errorbox">Backup was not found.</div>
diff --git a/google/appengine/ext/datastore_admin/templates/do_backup.html b/google/appengine/ext/datastore_admin/templates/do_backup.html
index 0246ee6..57ebb47 100644
--- a/google/appengine/ext/datastore_admin/templates/do_backup.html
+++ b/google/appengine/ext/datastore_admin/templates/do_backup.html
@@ -47,6 +47,13 @@
         </p>
       </div>
     {% endif %}
+    {% if remote_job %}
+      <div class="ae-message">
+        <div class="ae-alert">
+          Backup was initiated as a remote job.
+        </div>
+      </div>
+    {% endif %}
     {% if error %}
       <p>There was a problem kicking off some of the jobs/tasks:
         <pre><code>{{error|escape}}</code></pre>
diff --git a/google/appengine/ext/datastore_admin/templates/do_delete.html b/google/appengine/ext/datastore_admin/templates/do_delete.html
index 5e8ffac..fcba416 100644
--- a/google/appengine/ext/datastore_admin/templates/do_delete.html
+++ b/google/appengine/ext/datastore_admin/templates/do_delete.html
@@ -8,35 +8,41 @@
 {% endblock %}
 {% block body %}
   <h2>Delete Job Status</h2>
-  {% if xsrf_error %}
+  {% if noconfirm_error %}
     <div class="ae-errorbox">
-      The token used to submit this form has expired.
+      The user did not confirm deletion in read-only mode.
     </div>
   {% else %}
-    {% if job_list %}
-      <div class="ae-message">
-        <div class="ae-alert">
-          The following jobs were launched by MapReduce.
-        </div>
-        <p>Each job's status will only be available for as long as the
-        job takes to complete.  Once the job is complete, it will remove the
-        status objects from your datastore.  Click a job's id for status
-        information.
-        </p>
+    {% if xsrf_error %}
+      <div class="ae-errorbox">
+        The token used to submit this form has expired.
       </div>
-    {% endif %}
-    {% for id in job_list %}
-      <p>Delete <a href="{{mapreduce_detail}}{{id}}">job with id {{id}}</a>
-          kicked off.</p>
-    {% endfor %}
+    {% else %}
+      {% if job_list %}
+        <div class="ae-message">
+          <div class="ae-alert">
+            The following jobs were launched by MapReduce.
+          </div>
+          <p>Each job's status will only be available for as long as the
+          job takes to complete.  Once the job is complete, it will remove the
+          status objects from your datastore.  Click a job's id for status
+          information.
+          </p>
+        </div>
+      {% endif %}
+      {% for id in job_list %}
+        <p>Delete <a href="{{ mapreduce_detail|escape }}{{ id|escape }}">job
+        with id {{id|escape}}</a> kicked off.</p>
+      {% endfor %}
 
-    {% if error %}
-      <p>There was a problem kicking off the jobs.
-      {% if job_list %}The above jobs did start successfully.  The error starting
-          the remaining jobs was:
-      {% else %}The error was:
-      {% endif %}</p>
-      <pre><code>{{error}}</code></pre>
+      {% if error %}
+        <p>There was a problem kicking off the jobs.
+        {% if job_list %}The above jobs did start successfully.  The error starting
+            the remaining jobs was:
+        {% else %}The error was:
+        {% endif %}</p>
+        <pre><code>{{ error|escape }}</code></pre>
+      {% endif %}
     {% endif %}
   {% endif %}
   <a href="{{datastore_admin_home}}">Back to Datastore Admin</a>
diff --git a/google/appengine/ext/datastore_admin/templates/do_restore_from_backup.html b/google/appengine/ext/datastore_admin/templates/do_restore_from_backup.html
index 5ce0584..367f03c 100644
--- a/google/appengine/ext/datastore_admin/templates/do_restore_from_backup.html
+++ b/google/appengine/ext/datastore_admin/templates/do_restore_from_backup.html
@@ -24,12 +24,19 @@
         information.
         </p>
       </div>
+      {% for id in job_list %}
+        <p>Restore from backup <a href="{{mapreduce_detail|escape}}{{id|escape}}">job with id
+          {{id|escape}}</a>
+            kicked off.</p>
+      {% endfor %}
     {% endif %}
-    {% for id in job_list %}
-      <p>Backup <a href="{{mapreduce_detail|escape}}{{id|escape}}">job with id
-        {{id|escape}}</a>
-          kicked off.</p>
-    {% endfor %}
+    {% if remote_job %}
+      <div class="ae-message">
+        <div class="ae-alert">
+          Restore from backup was initiated as a remote job.
+        </div>
+      </div>
+    {% endif %}
 
     {% if error %}
       <p>There was a problem kicking off the jobs.
diff --git a/google/appengine/ext/datastore_admin/templates/list_actions.html b/google/appengine/ext/datastore_admin/templates/list_actions.html
index 78e15e1..e5820b6 100644
--- a/google/appengine/ext/datastore_admin/templates/list_actions.html
+++ b/google/appengine/ext/datastore_admin/templates/list_actions.html
@@ -3,7 +3,7 @@
   <h2>Datastore Admin of {{hosting_app_id}}</h2>
   {% if error %}
   <div class="ae-errorbox">
-    {{ error }}
+    {{ error|escape }}
   </div>
   {% endif %}
   <div class="ae-table-caption">
@@ -32,11 +32,13 @@
   </div>
   {% if kind_stats %}
   <form action="{{ base_path }}/" method="post">
-    {% if has_namespace %}
-    <input type="hidden" name="namespace" value="{{ namespace|escape}}">
+    {% if run_as_a_service %}
+      <input type="hidden" name="run_as_a_service" value="{{ run_as_a_service|escape }}">
     {% endif %}
-    <input type="hidden" name="app_id" value="{{ app_id }}">
-    <input type="hidden" name="cancel_url" value="{{ cancel_url }}">
+    {% if has_namespace %}
+    <input type="hidden" name="namespace" value="{{ namespace|escape }}">
+    {% endif %}
+    <input type="hidden" name="app_id" value="{{ app_id|escape }}">
     <table id="kinds" class="ae-table ae-table-striped">
       <thead>
         <tr>
@@ -67,9 +69,17 @@
               <td class="id" style="color: #666667;">Stats not available</td>
               <td class="id" style="color: #666667;">Stats not available</td>
               <td class="id" style="color: #666667;">Stats not available</td>
+              <td class="id" style="color: #666667;">Stats not available</td>
             {% endif %}
           </tr>
         {% endfor %}
+        {% if more_kinds %}
+          <tr>
+            <td colspan="6" class="cbc">
+              Not all kinds could be displayed, possibly because there were too many. To back up a kind that does not appear in this list, use a <a href="https://developers.google.com/appengine/articles/scheduled_backups" target="_blank">scheduled backup</a>.
+            </td>
+          </tr>
+        {% endif %}
       </tbody>
       <tfoot>
         <tr>
@@ -110,8 +120,10 @@
       </div>
     </div>
     <form action="{{ base_path }}/" method="post">
-      <input type="hidden" name="app_id" value="{{ app_id }}">
-      <input type="hidden" name="cancel_url" value="{{ cancel_url }}">
+      {% if run_as_a_service %}
+        <input type="hidden" name="run_as_a_service" value="{{ run_as_a_service|escape }}">
+      {% endif %}
+      <input type="hidden" name="app_id" value="{{ app_id|escape }}">
       <table id="pending_backups" class="ae-table ae-table-striped">
         <thead>
           <tr>
@@ -159,8 +171,10 @@
   </div>
   {% if backups %}
     <form action="{{ base_path }}/" method="post">
-      <input type="hidden" name="app_id" value="{{ app_id }}">
-      <input type="hidden" name="cancel_url" value="{{ cancel_url }}">
+      {% if run_as_a_service %}
+        <input type="hidden" name="run_as_a_service" value="{{ run_as_a_service|escape }}">
+      {% endif %}
+      <input type="hidden" name="app_id" value="{{ app_id|escape }}">
       <table id="backups" class="ae-table ae-table-striped">
         <thead>
           <tr>
@@ -202,7 +216,9 @@
   {% endif %}
   <div class="ae-table-caption">
     <form action="{{ base_path }}/" method="post" >
-      <input type="hidden" name="cancel_url" value="{{ cancel_url }}">
+      {% if run_as_a_service %}
+        <input type="hidden" name="run_as_a_service" value="{{ run_as_a_service|escape }}">
+      {% endif %}
       <input type="submit" name="action" value="Import Backup Information" title="Import Backup Information or restore from Google Cloud Storage"/>
       <input type="text" name="gs_handle" value="" size="50" title="Google Cloud Storage path of a backup info file or a bucket name"/>
     </form>
diff --git a/google/appengine/ext/datastore_admin/utils.py b/google/appengine/ext/datastore_admin/utils.py
index 35c0f51..f8c78fd 100644
--- a/google/appengine/ext/datastore_admin/utils.py
+++ b/google/appengine/ext/datastore_admin/utils.py
@@ -29,12 +29,12 @@
 
 from google.appengine.datastore import entity_pb
 from google.appengine.api import datastore
-from google.appengine.api import lib_config
 from google.appengine.api import memcache
 from google.appengine.api import users
 from google.appengine.datastore import datastore_rpc
 from google.appengine.ext import db
 from google.appengine.ext import webapp
+from google.appengine.ext.datastore_admin import config
 from google.appengine.ext.db import stats
 from google.appengine.ext.mapreduce import control
 from google.appengine.ext.mapreduce import model
@@ -50,7 +50,6 @@
 MAPREDUCE_MAX_SHARDS = 256
 RESERVE_KEY_POOL_MAX_SIZE = 1000
 
-
 DATASTORE_ADMIN_OPERATION_KIND = '_AE_DatastoreAdmin_Operation'
 BACKUP_INFORMATION_KIND = '_AE_Backup_Information'
 BACKUP_INFORMATION_FILES_KIND = '_AE_Backup_Information_Kind_Files'
@@ -61,32 +60,6 @@
                          BACKUP_INFORMATION_KIND_TYPE_INFO)
 
 
-class ConfigDefaults(object):
-  """Configurable constants.
-
-  To override datastore_admin configuration values, define values like this
-  in your appengine_config.py file (in the root of your app):
-
-    datastore_admin_MAPREDUCE_PATH = /_ah/mapreduce
-  """
-
-  BASE_PATH = '/_ah/datastore_admin'
-  MAPREDUCE_PATH = '/_ah/mapreduce'
-  DEFERRED_PATH = BASE_PATH + '/queue/deferred'
-  CLEANUP_MAPREDUCE_STATE = True
-
-
-
-config = lib_config.register('datastore_admin', ConfigDefaults.__dict__)
-
-
-
-
-config.BASE_PATH
-
-
-
-
 def IsKindNameVisible(kind_name):
   return not (kind_name.startswith('__') or
               kind_name in DATASTORE_ADMIN_KINDS or
@@ -102,6 +75,18 @@
     template_params: the parameters used to render the given template
   """
   template_params = _GetDefaultParams(template_params)
+
+
+
+
+
+
+
+
+  handler.response.headers['X-FRAME-OPTIONS'] = ('ALLOW-FROM %s' %
+                                                 config.ADMIN_CONSOLE_URL)
+  template_params['admin_console_url'] = config.ADMIN_CONSOLE_URL
+
   rendered = _template.render(_GetTemplatePath(template_file), template_params)
   handler.response.out.write(rendered)
 
@@ -337,6 +322,26 @@
   return datastore_rpc.Configuration(force_writes=True)
 
 
+def GenerateHomeUrl(request):
+  """Generates a link to the Datastore Admin main page.
+
+  Primarily intended to be used for cancel buttons or links on error pages.
+  To avoid XSS security vulnerabilities, the URL should not include any
+  user-defined strings (unless proper precautions are taken).
+
+  Args:
+    request: the webapp.Request object (to determine if certain query
+      parameters need to be used).
+
+  Returns:
+    domain-relative URL for the main Datastore Admin page.
+  """
+  datastore_admin_home = config.BASE_PATH
+  if request and request.get('run_as_a_service'):
+    datastore_admin_home += '?run_as_a_service=True'
+  return datastore_admin_home
+
+
 class MapreduceDoneHandler(webapp.RequestHandler):
   """Handler to delete data associated with successful MapReduce jobs."""
 
@@ -398,6 +403,11 @@
       logging.error('Done callback called without Mapreduce Id.')
 
 
+class Error(Exception):
+  """Base DatastoreAdmin error type."""
+
+
+
 class DatastoreAdminOperation(db.Model):
   """An entity to keep progress and status of datastore admin operation."""
   STATUS_CREATED = 'Created'
@@ -418,6 +428,7 @@
   last_updated = db.DateTimeProperty(default=DEFAULT_LAST_UPDATED_VALUE,
                                      auto_now=True)
   status_info = db.StringProperty(default='', indexed=False)
+  service_job_id = db.StringProperty()
 
   @classmethod
   def kind(cls):
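
The new GenerateHomeUrl helper replaces the template-supplied cancel_url links
with a server-built datastore_admin_home URL, carrying the run_as_a_service
flag along when the request has it. A minimal usage sketch, assuming the
rendering helper in this module is named RenderToResponse (the handler class
and template name below are hypothetical):

    from google.appengine.ext import webapp
    from google.appengine.ext.datastore_admin import utils

    class ConfirmSomethingHandler(webapp.RequestHandler):
      def get(self):
        template_params = {
            # Rendered as {{ datastore_admin_home }} in the templates above;
            # appends '?run_as_a_service=True' when the request has the flag.
            'datastore_admin_home': utils.GenerateHomeUrl(self.request),
        }
        utils.RenderToResponse(self, 'confirm_something.html', template_params)
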
diff --git a/google/appengine/ext/endpoints/__init__.py b/google/appengine/ext/endpoints/__init__.py
index 997ba05..3a77713 100644
--- a/google/appengine/ext/endpoints/__init__.py
+++ b/google/appengine/ext/endpoints/__init__.py
@@ -33,3 +33,5 @@
 from apiserving import *
 import message_parser
 from users_id_token import get_current_user
+from users_id_token import InvalidGetUserCall
+from users_id_token import SKIP_CLIENT_ID_CHECK
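
With these re-exports, callers no longer have to reach into users_id_token;
both names are available from the package itself (see the users_id_token.py
changes below for what they do):

    from google.appengine.ext import endpoints

    endpoints.SKIP_CLIENT_ID_CHECK   # the ['*'] sentinel for skipping checks
    endpoints.InvalidGetUserCall     # raised by get_current_user, see below
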
diff --git a/google/appengine/ext/endpoints/api_config.py b/google/appengine/ext/endpoints/api_config.py
index 36e9a32..c8ba3f3 100644
--- a/google/appengine/ext/endpoints/api_config.py
+++ b/google/appengine/ext/endpoints/api_config.py
@@ -40,7 +40,6 @@
   import json
 except ImportError:
   import simplejson as json
-import logging
 import re
 
 from protorpc import message_types
@@ -255,6 +254,16 @@
     return self.__common_info.frontend_limits
 
   @property
+  def title(self):
+    """Human readable name of this API."""
+    return self.__common_info.title
+
+  @property
+  def documentation(self):
+    """Link to the documentation for this version of the API."""
+    return self.__common_info.documentation
+
+  @property
   def resource_name(self):
     """Resource name for the class this decorates."""
     return self.__resource_name
@@ -277,7 +286,8 @@
   def __init__(self, name, version, description=None, hostname=None,
                audiences=None, scopes=None, allowed_client_ids=None,
                canonical_name=None, auth=None, owner_domain=None,
-               owner_name=None, package_path=None, frontend_limits=None):
+               owner_name=None, package_path=None, frontend_limits=None,
+               title=None, documentation=None):
     """Constructor for _ApiDecorator.
 
     Args:
@@ -303,6 +313,11 @@
         client libraries of this API.
       frontend_limits: ApiFrontEndLimits, optional query limits for unregistered
         developers.
+      title: string, the human readable title of your API. It is exposed in the
+        discovery service.
+      documentation: string, a URL where users can find documentation about this
+        version of the API. This will be surfaced in the API Explorer and GPE
+        plugin to allow users to learn about your service.
     """
     self.__common_info = self.__ApiCommonInfo(
         name, version, description=description, hostname=hostname,
@@ -310,7 +325,8 @@
         allowed_client_ids=allowed_client_ids,
         canonical_name=canonical_name, auth=auth, owner_domain=owner_domain,
         owner_name=owner_name, package_path=package_path,
-        frontend_limits=frontend_limits)
+        frontend_limits=frontend_limits, title=title,
+        documentation=documentation)
     self.__classes = []
 
   class __ApiCommonInfo(object):
@@ -333,7 +349,8 @@
     def __init__(self, name, version, description=None, hostname=None,
                  audiences=None, scopes=None, allowed_client_ids=None,
                  canonical_name=None, auth=None, owner_domain=None,
-                 owner_name=None, package_path=None, frontend_limits=None):
+                 owner_name=None, package_path=None, frontend_limits=None,
+                 title=None, documentation=None):
       """Constructor for _ApiCommonInfo.
 
       Args:
@@ -359,6 +376,11 @@
           client libraries of this API.
         frontend_limits: ApiFrontEndLimits, optional query limits for
           unregistered developers.
+        title: string, the human readable title of your API. It is exposed in
+          the discovery service.
+        documentation: string, a URL where users can find documentation about
+          this version of the API. This will be surfaced in the API Explorer and
+          GPE plugin to allow users to learn about your service.
       """
       _CheckType(name, basestring, 'name', allow_none=False)
       _CheckType(version, basestring, 'version', allow_none=False)
@@ -373,6 +395,8 @@
       _CheckType(owner_name, basestring, 'owner_name')
       _CheckType(package_path, basestring, 'package_path')
       _CheckType(frontend_limits, ApiFrontEndLimits, 'frontend_limits')
+      _CheckType(title, basestring, 'title')
+      _CheckType(documentation, basestring, 'documentation')
 
       if hostname is None:
         hostname = app_identity.get_default_version_hostname()
@@ -396,6 +420,8 @@
       self.__owner_name = owner_name
       self.__package_path = package_path
       self.__frontend_limits = frontend_limits
+      self.__title = title
+      self.__documentation = documentation
 
     @property
     def name(self):
@@ -462,6 +488,16 @@
       """Optional query limits for unregistered developers."""
       return self.__frontend_limits
 
+    @property
+    def title(self):
+      """Human readable name of this API."""
+      return self.__title
+
+    @property
+    def documentation(self):
+      """Link to the documentation for this version of the API."""
+      return self.__documentation
+
   def __call__(self, service_class):
     """Decorator for ProtoRPC class that configures Google's API server.
 
@@ -473,12 +509,6 @@
     """
     return self.api_class()(service_class)
 
-  def collection(self, *args, **kwargs):
-    logging.warning('Using deprecated "collection" decorator.  Use the '
-                    'api_class decorator instead.  The "collection" decorator '
-                    'will be removed in a future version.')
-    return self.api_class(*args, **kwargs)
-
   def api_class(self, resource_name=None, path=None, audiences=None,
                 scopes=None, allowed_client_ids=None):
     """Get a decorator for a class that implements an API.
@@ -666,7 +696,7 @@
 def api(name, version, description=None, hostname=None, audiences=None,
         scopes=None, allowed_client_ids=None, canonical_name=None,
         auth=None, owner_domain=None, owner_name=None, package_path=None,
-        frontend_limits=None):
+        frontend_limits=None, title=None, documentation=None):
   """Decorate a ProtoRPC Service class for use by the framework above.
 
   This decorator can be used to specify an API name, version, description, and
@@ -718,6 +748,11 @@
       client libraries of this API.
     frontend_limits: ApiFrontEndLimits, optional query limits for unregistered
       developers.
+    title: string, the human readable title of your API. It is exposed in the
+      discovery service.
+    documentation: string, a URL where users can find documentation about this
+      version of the API. This will be surfaced in the API Explorer and GPE
+      plugin to allow users to learn about your service.
 
   Returns:
     Class decorated with api_info attribute, an instance of ApiInfo.
@@ -729,7 +764,8 @@
                        canonical_name=canonical_name, auth=auth,
                        owner_domain=owner_domain, owner_name=owner_name,
                        package_path=package_path,
-                       frontend_limits=frontend_limits)
+                       frontend_limits=frontend_limits, title=title,
+                       documentation=documentation)
 
 
 class CacheControl(object):
@@ -1676,6 +1712,7 @@
 
     method_map = {}
     method_collision_tracker = {}
+    rest_collision_tracker = {}
 
     for service in services:
       remote_methods = service.all_remote_methods()
@@ -1693,12 +1730,24 @@
 
         if method_id in method_collision_tracker:
           raise ApiConfigurationError(
-              'Method %s used in multiple classes: %s and %s' %
+              'Method %s used multiple times, in classes %s and %s' %
               (method_id, method_collision_tracker[method_id],
                service.__name__))
         else:
           method_collision_tracker[method_id] = service.__name__
 
+
+        rest_identifier = (method_info.http_method,
+                           method_info.get_path(service.api_info))
+        if rest_identifier in rest_collision_tracker:
+          raise ApiConfigurationError(
+              '%s path "%s" used multiple times, in classes %s and %s' %
+              (method_info.http_method, method_info.get_path(service.api_info),
+               rest_collision_tracker[rest_identifier],
+               service.__name__))
+        else:
+          rest_collision_tracker[rest_identifier] = service.__name__
+
     if method_map:
       descriptor['methods'] = method_map
       descriptor['descriptor'] = self.__schema_descriptor(services)
@@ -1738,6 +1787,10 @@
       defaults['ownerName'] = api_info.owner_name
     if api_info.package_path:
       defaults['packagePath'] = api_info.package_path
+    if api_info.title:
+      defaults['title'] = api_info.title
+    if api_info.documentation:
+      defaults['documentation'] = api_info.documentation
     return defaults
 
   def pretty_print_config_to_json(self, services, hostname=None):
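
The title and documentation arguments threaded through this file end up in the
discovery document via the 'title' and 'documentation' defaults above. A
minimal sketch of the decorator with the new arguments, assuming it is
importable as endpoints.api (the API name, title, and URL below are
hypothetical):

    from google.appengine.ext import endpoints
    from protorpc import remote

    @endpoints.api(name='library', version='v1',
                   title='Library API',
                   documentation='http://example.com/library-api/docs')
    class LibraryApi(remote.Service):
      """Title and docs link are surfaced in API Explorer and GPE."""
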
diff --git a/google/appengine/ext/endpoints/apiserving.py b/google/appengine/ext/endpoints/apiserving.py
index 381510f..0ab4fe7 100644
--- a/google/appengine/ext/endpoints/apiserving.py
+++ b/google/appengine/ext/endpoints/apiserving.py
@@ -220,30 +220,10 @@
         api_services.remove(entry)
         api_services.extend(entry.get_api_classes())
 
-    protorpc_services = []
-    generator = api_config.ApiConfigGenerator()
     self.api_config_registry = api_backend_service.ApiConfigRegistry()
-    api_name_version_map = {}
-    for service in api_services:
-      key = (service.api_info.name, service.api_info.version)
-      services = api_name_version_map.setdefault(key, [])
-      if service in services:
-        raise api_config.ApiConfigurationError(
-            'Can\'t add the same class to an API twice: %s' % service.__name__)
-      services.append(service)
-
-    for services in api_name_version_map.values():
-      config_file = generator.pretty_print_config_to_json(services)
-
-
-
-      self.api_config_registry.register_spi(config_file)
-      for api_service in services:
-        protorpc_class_name = api_service.__name__
-        root = self.__SPI_PREFIX + protorpc_class_name
-        if not any(service[0] == root or service[1] == api_service
-                   for service in protorpc_services):
-          protorpc_services.append((root, api_service))
+    api_name_version_map = self.__create_name_version_map(api_services)
+    protorpc_services = self.__register_services(api_name_version_map,
+                                                 self.api_config_registry)
 
 
     backend_service = api_backend_service.BackendServiceImpl.new_factory(
@@ -262,6 +242,85 @@
     self.service_app = wsgi_service.service_mappings(protorpc_services,
                                                      **kwargs)
 
+  @staticmethod
+  def __create_name_version_map(api_services):
+    """Create a map from API name/version to Service class/factory.
+
+    This creates a map from an API name and version to a list of remote.Service
+    factories that implement that API.
+
+    Args:
+      api_services: A list of remote.Service-derived classes or factories
+        created with remote.Service.new_factory.
+
+    Returns:
+      A mapping from (api name, api version) to a list of service factories,
+      for service classes that implement that API.
+
+    Raises:
+      ApiConfigurationError: If a Service class appears more than once
+        in api_services.
+    """
+    api_name_version_map = {}
+    for service_factory in api_services:
+      try:
+        service_class = service_factory.service_class
+      except AttributeError:
+        service_class = service_factory
+        service_factory = service_class.new_factory()
+
+      key = service_class.api_info.name, service_class.api_info.version
+      service_factories = api_name_version_map.setdefault(key, [])
+      if service_factory in service_factories:
+        raise api_config.ApiConfigurationError(
+            'Can\'t add the same class to an API twice: %s' %
+            service_factory.service_class.__name__)
+
+      service_factories.append(service_factory)
+    return api_name_version_map
+
+  @staticmethod
+  def __register_services(api_name_version_map, api_config_registry):
+    """Register & return a list of each SPI URL and class that handles that URL.
+
+    This finds every service class in api_name_version_map, registers it with
+    the given ApiConfigRegistry, builds the SPI url for that class, and adds
+    the URL and its factory to a list that's returned.
+
+    Args:
+      api_name_version_map: A mapping from (api name, api version) to a list of
+        service factories, as returned by __create_name_version_map.
+      api_config_registry: The ApiConfigRegistry where service classes will
+        be registered.
+
+    Returns:
+      A list of (SPI URL, service_factory) for each service class in
+      api_name_version_map.
+
+    Raises:
+      ApiConfigurationError: If a Service class appears more than once
+        in api_name_version_map.  This could happen if one class is used to
+        implement multiple APIs.
+    """
+    generator = api_config.ApiConfigGenerator()
+    protorpc_services = []
+    for service_factories in api_name_version_map.itervalues():
+      service_classes = [service_factory.service_class
+                         for service_factory in service_factories]
+      config_file = generator.pretty_print_config_to_json(service_classes)
+      api_config_registry.register_spi(config_file)
+
+      for service_factory in service_factories:
+        protorpc_class_name = service_factory.service_class.__name__
+        root = _ApiServer.__SPI_PREFIX + protorpc_class_name
+        if any(service_map[0] == root or service_map[1] == service_factory
+               for service_map in protorpc_services):
+          raise api_config.ApiConfigurationError(
+              'Can\'t reuse the same class in multiple APIs: %s' %
+              protorpc_class_name)
+        protorpc_services.append((root, service_factory))
+    return protorpc_services
+
   def __is_request_restricted(self, environ):
     """Determine if access to SPI should be denied.
 
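
The refactoring above makes the registration rules explicit: several service
classes (or factories made with remote.Service.new_factory) may implement one
API, but reusing a class across APIs now raises ApiConfigurationError, as does
reusing a REST path (see rest_collision_tracker in api_config.py earlier in
this diff). A sketch of the supported multi-class layout, assuming
endpoints.api and endpoints.api_server are the public entry points (class and
API names below are hypothetical):

    from google.appengine.ext import endpoints
    from protorpc import remote

    # One API (one name/version pair), implemented by two service classes.
    books_api = endpoints.api(name='books', version='v1')

    @books_api.api_class(resource_name='shelves')
    class Shelves(remote.Service):
      pass

    @books_api.api_class(resource_name='titles')
    class Titles(remote.Service):
      pass

    application = endpoints.api_server([books_api])
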
diff --git a/google/appengine/ext/endpoints/message_parser.py b/google/appengine/ext/endpoints/message_parser.py
index 8be547c..b24de99 100644
--- a/google/appengine/ext/endpoints/message_parser.py
+++ b/google/appengine/ext/endpoints/message_parser.py
@@ -201,6 +201,12 @@
         type_info['type'] = schema_type[0]
         if schema_type[1]:
           type_info['format'] = schema_type[1]
+
+      if type(field) == messages.EnumField:
+        sorted_enums = sorted([enum_info for enum_info in field.type],
+                              key=lambda enum_info: enum_info.number)
+        type_info['enum'] = [enum_info.name for enum_info in sorted_enums]
+
       if field.required:
         descriptor['required'] = True
 
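
With the EnumField branch above, generated schemas now enumerate a field's
allowed values, sorted by enum number. A sketch of the effect (the message and
enum below are hypothetical):

    from protorpc import messages

    class Status(messages.Enum):
      PENDING = 1
      DONE = 2
      FAILED = 3

    class Job(messages.Message):
      status = messages.EnumField(Status, 1)

    # The schema descriptor for Job.status would now include, alongside its
    # string type:
    #   'enum': ['PENDING', 'DONE', 'FAILED']
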
diff --git a/google/appengine/ext/endpoints/protojson.py b/google/appengine/ext/endpoints/protojson.py
index 069d616..8e84c2d 100644
--- a/google/appengine/ext/endpoints/protojson.py
+++ b/google/appengine/ext/endpoints/protojson.py
@@ -19,6 +19,8 @@
 """Endpoints-specific implementation of ProtoRPC's ProtoJson class."""
 
 
+import base64
+
 from protorpc import messages
 from protorpc import protojson
 
@@ -63,3 +65,24 @@
         return value
 
     return super(EndpointsProtoJson, self).encode_field(field, value)
+
+  def decode_field(self, field, value):
+    """Decode a JSON value to a python value.
+
+    Args:
+      field: A ProtoRPC field instance.
+      value: A serialized JSON value.
+
+    Returns:
+      A Python value compatible with field.
+    """
+
+
+
+    if isinstance(field, messages.BytesField):
+      try:
+        return base64.urlsafe_b64decode(value)
+      except TypeError, err:
+        raise messages.DecodeError('Base64 decoding error: %s' % err)
+
+    return super(EndpointsProtoJson, self).decode_field(field, value)
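
The new decode_field mirrors the encode_field override above it: BytesField
values travel through JSON as URL-safe base64 strings, and malformed input
surfaces as a messages.DecodeError instead of a bare TypeError. A round-trip
sketch of the wire format:

    import base64

    raw = '\x00\xffbinary payload'
    wire = base64.urlsafe_b64encode(raw)       # what a JSON client sends
    assert base64.urlsafe_b64decode(wire) == raw
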
diff --git a/google/appengine/ext/endpoints/users_id_token.py b/google/appengine/ext/endpoints/users_id_token.py
index ac30121..3afe827 100644
--- a/google/appengine/ext/endpoints/users_id_token.py
+++ b/google/appengine/ext/endpoints/users_id_token.py
@@ -65,8 +65,11 @@
   _CRYPTO_LOADED = False
 
 
-__all__ = ['get_current_user']
+__all__ = ['get_current_user',
+           'InvalidGetUserCall',
+           'SKIP_CLIENT_ID_CHECK']
 
+SKIP_CLIENT_ID_CHECK = ['*']
 _CLOCK_SKEW_SECS = 300
 _MAX_TOKEN_LIFETIME_SECS = 86400
 _DEFAULT_CERT_URI = ('https://www.googleapis.com/service_accounts/v1/metadata/'
@@ -84,6 +87,10 @@
   pass
 
 
+class InvalidGetUserCall(Exception):
+  """Called get_current_user when the environment was not set up for it."""
+
+
 
 def get_current_user():
   """Get user information from the id_token or oauth token in the request.
@@ -102,10 +109,16 @@
     None if there is no token or it's invalid.  If the token was valid, this
       returns a User.  Only the user's email field is guaranteed to be set.
       Other fields may be empty.
+
+  Raises:
+    InvalidGetUserCall: if the environment variables necessary to determine the
+      endpoints user are not set. These are typically set when processing a
+      request using an Endpoints handler. If they are not set, it likely
+      indicates that this function was called from outside an Endpoints request
+      handler.
   """
   if not _is_auth_info_available():
-    logging.error('endpoints.get_current_user() called outside a request.')
-    return None
+    raise InvalidGetUserCall('No valid endpoints user in environment.')
 
   if _ENV_USE_OAUTH_SCOPE in os.environ:
 
@@ -200,7 +213,7 @@
 
   if ((scopes == [_EMAIL_SCOPE] or scopes == (_EMAIL_SCOPE,)) and
       allowed_client_ids):
-    logging.info('Checking for id_token.')
+    logging.debug('Checking for id_token.')
     time_now = long(time.time())
     user = _get_id_token_user(token, audiences, allowed_client_ids, time_now,
                               memcache)
@@ -211,7 +224,7 @@
 
 
   if scopes:
-    logging.info('Checking for oauth token.')
+    logging.debug('Checking for oauth token.')
     if _is_local_dev():
       _set_bearer_user_vars_local(token, allowed_client_ids, scopes)
     else:
@@ -271,10 +284,10 @@
   try:
     parsed_token = _verify_signed_jwt_with_certs(token, time_now, cache)
   except _AppIdentityError, e:
-    logging.warning('id_token verification failed: %s', e)
+    logging.debug('id_token verification failed: %s', e)
     return None
   except:
-    logging.warning('id_token verification failed.')
+    logging.debug('id_token verification failed.')
     return None
 
   if _verify_parsed_token(parsed_token, audiences, allowed_client_ids):
@@ -316,7 +329,10 @@
       continue
 
 
-    if allowed_client_ids and client_id not in allowed_client_ids:
+
+
+    if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and
+        client_id not in allowed_client_ids):
       logging.warning('Client ID is not allowed: %s', client_id)
       return
 
@@ -324,7 +340,7 @@
     logging.debug('Returning user from matched oauth_user.')
     return
 
-  logging.warning('Oauth framework user didn\'t match oauth token user.')
+  logging.debug('Oauth framework user didn\'t match oauth token user.')
   return None
 
 
@@ -349,8 +365,8 @@
       error_description = json.loads(result.content)['error_description']
     except (ValueError, KeyError):
       error_description = ''
-    logging.warning('Token info endpoint returned status %s: %s',
-                    result.status_code, error_description)
+    logging.error('Token info endpoint returned status %s: %s',
+                  result.status_code, error_description)
     return
   token_info = json.loads(result.content)
 
@@ -364,7 +380,8 @@
 
 
   client_id = token_info.get('issued_to')
-  if allowed_client_ids and client_id not in allowed_client_ids:
+  if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and
+      client_id not in allowed_client_ids):
     logging.warning('Client ID is not allowed: %s', client_id)
     return
 
@@ -404,8 +421,8 @@
     return False
 
 
-  if not allowed_client_ids:
-    logging.warning('No allowed client IDs specified.  '
+  if list(allowed_client_ids) == SKIP_CLIENT_ID_CHECK:
+    logging.warning('Client ID check can\'t be skipped for ID tokens.  '
                     'Id_token cannot be verified.')
     return False
   elif not cid or cid not in allowed_client_ids:
@@ -468,7 +485,7 @@
 def _get_cached_certs(cert_uri, cache):
   certs = cache.get(cert_uri, namespace=_CERT_NAMESPACE)
   if certs is None:
-    logging.info('Cert cache miss')
+    logging.debug('Cert cache miss')
     try:
       result = urlfetch.fetch(cert_uri)
     except AssertionError:
@@ -567,6 +584,10 @@
                             'for more information on pycrypto.')
 
 
+
+  local_hash = SHA256.new(signed).hexdigest()
+
+
   verified = False
   for keyvalue in certs['keyvalues']:
     modulus = _b64_to_long(keyvalue['modulus'])
@@ -574,14 +595,13 @@
     key = RSA.construct((modulus, exponent))
 
 
-
-    local_hash = SHA256.new(signed).hexdigest()
-    local_hash = local_hash.zfill(64)
-
-
     hexsig = '%064x' % key.encrypt(lsignature, '')[0]
 
-    verified = (hexsig[-64:] == local_hash)
+    hexsig = hexsig[-64:]
+
+
+
+    verified = (hexsig == local_hash)
     if verified:
       break
   if not verified:
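
Two behavior changes in this file are worth noting: get_current_user now
raises InvalidGetUserCall instead of silently returning None when called
outside an Endpoints request, and an empty allowed_client_ids no longer skips
the client ID check for OAuth tokens; skipping must be requested explicitly
with SKIP_CLIENT_ID_CHECK (and is still refused for ID tokens). A usage sketch
under the new rules (the API and helper below are hypothetical):

    from google.appengine.ext import endpoints
    from protorpc import remote

    @endpoints.api(name='example', version='v1',
                   allowed_client_ids=endpoints.SKIP_CLIENT_ID_CHECK)
    class ExampleApi(remote.Service):
      pass

    def current_user_or_none():
      try:
        return endpoints.get_current_user()
      except endpoints.InvalidGetUserCall:
        # Called outside an Endpoints request handler.
        return None
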
diff --git a/google/appengine/ext/go/__init__.py b/google/appengine/ext/go/__init__.py
deleted file mode 100644
index c71d59d..0000000
--- a/google/appengine/ext/go/__init__.py
+++ /dev/null
@@ -1,600 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2007 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-
-"""A bridge between dev_appserver.py and a Go app."""
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-import asyncore
-import atexit
-import datetime
-import errno
-import getpass
-import logging
-import os
-import random
-import re
-import shutil
-import signal
-import socket
-import subprocess
-import stat
-import sys
-import tempfile
-import threading
-import time
-
-from google.appengine.ext.remote_api import handler
-from google.appengine.ext.remote_api import remote_api_pb
-from google.appengine.runtime import apiproxy_errors
-from google.appengine.tools import dev_appserver
-
-GAB_WORK_DIR = None
-GO_APP = None
-GO_APP_NAME = '_go_app'
-GO_HTTP_PORT = 0
-GO_API_PORT = 0
-RAPI_HANDLER = None
-HEALTH_CHECK_PATH = '/_appengine_delegate_health_check'
-INTERNAL_SERVER_ERROR = ('Status: 500 Internal Server Error\r\n' +
-    'Content-Type: text/plain\r\n\r\nInternal Server Error')
-MAX_START_TIME = 10
-
-
-
-HEADER_MAP = {
-    'APPLICATION_ID': 'X-AppEngine-Inbound-AppId',
-    'CONTENT_TYPE': 'Content-Type',
-    'CURRENT_VERSION_ID': 'X-AppEngine-Inbound-Version-Id',
-    'REMOTE_ADDR': 'X-AppEngine-Remote-Addr',
-    'REQUEST_LOG_ID': 'X-AppEngine-Request-Log-Id',
-    'USER_EMAIL': 'X-AppEngine-Inbound-User-Email',
-    'USER_ID': 'X-AppEngine-Inbound-User-Id',
-    'USER_IS_ADMIN': 'X-AppEngine-Inbound-User-Is-Admin',
-}
-
-
-ENV_PASSTHROUGH = re.compile(
-    r'^(BACKEND_PORT\..*|INSTANCE_ID|SERVER_SOFTWARE)$'
-)
-
-
-OS_ENV_PASSTHROUGH = (
-
-    'SYSTEMROOT',
-
-    'USER',
-)
-
-
-APP_CONFIG = None
-
-
-def quiet_kill(pid):
-  """Send a SIGTERM to pid; won't raise an exception if pid is not running."""
-  try:
-    os.kill(pid, signal.SIGTERM)
-  except OSError:
-    pass
-
-
-def pick_unused_port():
-  for _ in range(10):
-    port = int(random.uniform(32768, 60000))
-    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    try:
-      s.bind(('127.0.0.1', port))
-      return port
-    except socket.error:
-      logging.info('could not bind to port %d', port)
-    finally:
-      s.close()
-  raise dev_appserver.ExecuteError('could not pick an unused port')
-
-
-def gab_work_dir(config, user, port):
-  base = os.getenv('XDG_CACHE_HOME')
-  if not base:
-    if sys.platform == 'darwin':
-      base = os.path.join(os.getenv('HOME'), 'Library', 'Caches',
-                          'com.google.GoAppEngine')
-    else:
-
-      base = os.path.join(os.path.expanduser('~'), '.cache')
-
-
-  if os.path.islink(base):
-    try:
-      os.makedirs(os.path.realpath(base))
-    except OSError, e:
-
-      if e.errno != errno.EEXIST:
-        raise
-
-  app = re.sub(r'[.:]', '_', config.application)
-  return os.path.join(base,
-      'dev_appserver_%s_%s_%s_go_app_work_dir' % (app, user, port))
-
-
-def cleanup():
-  try:
-    shutil.rmtree(GAB_WORK_DIR)
-  except:
-    pass
-
-
-class DelegateClient(asyncore.dispatcher):
-  def __init__(self, http_req):
-    asyncore.dispatcher.__init__(self)
-    self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
-    self.connect(('127.0.0.1', GO_HTTP_PORT))
-    self.buffer = http_req
-    self.result = ''
-    self.closed = False
-
-  def handle_close(self):
-    self.close()
-    self.closed = True
-
-  def handle_connect(self):
-    pass
-
-  def handle_read(self):
-    self.result += self.recv(8192)
-
-  def handle_write(self):
-    sent = self.send(self.buffer)
-    self.buffer = self.buffer[sent:]
-
-  def writable(self):
-    return len(self.buffer) > 0
-
-
-class DelegateServer(asyncore.dispatcher):
-  def __init__(self):
-    asyncore.dispatcher.__init__(self)
-    self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
-    self.bind(('127.0.0.1', GO_API_PORT))
-    self.listen(5)
-
-  def handle_accept(self):
-    pair = self.accept()
-    if not pair:
-      return
-    sock, addr = pair
-    RemoteAPIHandler(sock)
-
-  def writable(self):
-    return False
-
-
-class RemoteAPIHandler(asyncore.dispatcher_with_send):
-  def __init__(self, sock):
-    asyncore.dispatcher_with_send.__init__(self, sock)
-
-
-
-    self.n = -1
-    self.data = ''
-
-  def handle_read(self):
-    self.data += self.recv(8192)
-    if self.n == -1:
-      i = self.data.find('\n')
-      if i == -1:
-
-        return
-      try:
-        self.n = int(self.data[:i])
-      except:
-        self.n = -2
-      if self.n < 0:
-
-        self.n = -2
-        self.data = ''
-        return
-      self.data = self.data[i+1:]
-    elif self.n == -2:
-      self.data = ''
-      return
-    if len(self.data) < self.n:
-
-      return
-
-    req = remote_api_pb.Request()
-    req.ParseFromString(self.data[:self.n])
-    self.data, self.n = self.data[self.n:], -1
-    rapi_result = None
-    rapi_error = 'unknown error'
-    try:
-      rapi_result = RAPI_HANDLER.ExecuteRequest(req)
-    except apiproxy_errors.CallNotFoundError, e:
-
-
-      service_name = req.service_name()
-      method = req.method()
-      rapi_error = 'call not found for %s/%s' % (service_name, method)
-    except Exception, e:
-      rapi_error = str(e)
-
-    res = remote_api_pb.Response()
-    if rapi_result:
-      res.set_response(rapi_result.Encode())
-    else:
-      ae = res.mutable_application_error()
-
-
-      ae.set_code(1)
-      ae.set_detail(rapi_error)
-    res1 = res.Encode()
-    self.send('%d\n' % len(res1))
-    self.send(res1)
-
-
-
-
-def find_app_files(basedir):
-  if not basedir.endswith(os.path.sep):
-    basedir = basedir + os.path.sep
-  files, dirs = {}, [basedir]
-  while dirs:
-    dname = dirs.pop()
-    for entry in os.listdir(dname):
-      ename = os.path.join(dname, entry)
-      if APP_CONFIG.skip_files.match(ename):
-        continue
-      try:
-        s = os.stat(ename)
-      except OSError, e:
-        logging.warn('%s', e)
-        continue
-      if stat.S_ISDIR(s[stat.ST_MODE]):
-        dirs.append(ename)
-        continue
-      files[ename[len(basedir):]] = s[stat.ST_MTIME]
-  return files
-
-
-
-
-def find_go_files_mtime(app_files):
-  files, mtime = [], 0
-  for f, mt in app_files.items():
-    if not f.endswith('.go'):
-      continue
-    if APP_CONFIG.nobuild_files.match(f):
-      continue
-    files.append(f)
-    mtime = max(mtime, mt)
-  return files, mtime
-
-
-def wait_until_go_app_ready(proc, tee):
-
-  deadline = (datetime.datetime.now() +
-              datetime.timedelta(seconds=MAX_START_TIME))
-  while datetime.datetime.now() < deadline:
-    if proc.poll():
-      raise dev_appserver.ExecuteError('Go app failed during init', tee.buf)
-    try:
-      s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-      s.connect(('127.0.0.1', GO_HTTP_PORT))
-      s.send('HEAD %s HTTP/1.0\r\n\r\n' % HEALTH_CHECK_PATH)
-      s.close()
-      return
-    except:
-      time.sleep(0.1)
-  quiet_kill(proc.pid)
-  raise dev_appserver.ExecuteError('unable to start ' + GO_APP_NAME, tee.buf)
-
-
-def up(path, n):
-  """Return the nth parent directory of the given path."""
-  for _ in range(n):
-    path = os.path.dirname(path)
-  return path
-
-
-class Tee(threading.Thread):
-  """A simple line-oriented "tee".
-
-  This class connects two file-like objects, piping the output of one to the
-  input of the other, and buffering the last N lines.
-  """
-
-  MAX_LINES = 100
-
-  def __init__(self, in_f, out_f):
-    threading.Thread.__init__(self, name='Tee')
-    self.__in = in_f
-    self.__out = out_f
-    self.buf = []
-
-  def run(self):
-    while True:
-      line = self.__in.readline()
-      if not line:
-        break
-      self.__out.write(line)
-      self.buf.append(line)
-      if len(self.buf) > Tee.MAX_LINES:
-        self.buf.pop(0)
-
-
-class GoApp:
-  def __init__(self, root_path):
-    self.root_path = root_path
-    self.proc = None
-    self.proc_start = 0
-    self.last_extras_hash = None
-    self.goroot = os.path.join(
-
-        up(__file__, 5),
-        'goroot')
-    if not os.path.isdir(self.goroot):
-      raise Exception('no goroot found at ' + self.goroot)
-
-
-    self.arch = None
-    arch_map = {
-        'arm': '5',
-        'amd64': '6',
-        '386': '8',
-    }
-    for p in os.listdir(os.path.join(self.goroot, 'pkg', 'tool')):
-
-      if '_' not in p:
-        continue
-      arch = p.split('_', 1)[1]
-      if arch in arch_map:
-        self.arch = arch_map[arch]
-        break
-    if not self.arch:
-      raise Exception('bad goroot: no compiler found')
-
-    atexit.register(self.cleanup)
-
-  def cleanup(self):
-    if self.proc:
-      quiet_kill(self.proc.pid)
-      self.proc = None
-
-  def make_and_run(self, env):
-    app_files = find_app_files(self.root_path)
-    go_files, go_mtime = find_go_files_mtime(app_files)
-    if not go_files:
-      raise Exception('no .go files in %s', self.root_path)
-    app_mtime = max(app_files.values())
-    bin_name, bin_mtime = os.path.join(GAB_WORK_DIR, GO_APP_NAME), 0
-    try:
-      bin_mtime = os.stat(bin_name)[stat.ST_MTIME]
-    except:
-      pass
-
-
-
-
-
-    rebuild, restart = False, False
-    if go_mtime >= bin_mtime:
-      rebuild, restart = True, True
-    elif app_mtime > self.proc_start:
-      restart = True
-    if not rebuild:
-
-
-
-      h = self.extras_hash(go_files)
-      if h != self.last_extras_hash:
-        logging.info('extra-app files hash changed to %s; rebuilding', h)
-        self.last_extras_hash = h
-        rebuild, restart = True, True
-
-    if restart and self.proc:
-      quiet_kill(self.proc.pid)
-      self.proc.wait()
-      self.proc = None
-    if rebuild:
-      self.build(go_files)
-
-
-    if not self.proc or self.proc.poll() is not None:
-      logging.info('running %s, HTTP port = %d, API port = %d',
-          GO_APP_NAME, GO_HTTP_PORT, GO_API_PORT)
-
-      limited_env = {
-          'GOROOT': self.goroot,
-          'PWD': self.root_path,
-          'TZ': 'UTC',
-      }
-      for k, v in env.items():
-        if ENV_PASSTHROUGH.match(k):
-          limited_env[k] = v
-      for e in OS_ENV_PASSTHROUGH:
-        if e in os.environ:
-          limited_env[e] = os.environ[e]
-      self.proc_start = app_mtime
-      self.proc = subprocess.Popen([bin_name,
-          '-addr_http', 'tcp:127.0.0.1:%d' % GO_HTTP_PORT,
-          '-addr_api', 'tcp:127.0.0.1:%d' % GO_API_PORT],
-          stderr=subprocess.PIPE,
-          cwd=self.root_path, env=limited_env)
-      tee = Tee(self.proc.stderr, sys.stderr)
-      tee.start()
-      wait_until_go_app_ready(self.proc, tee)
-
-  def _gab_args(self):
-    argv = [
-        os.path.join(self.goroot, 'bin', 'go-app-builder'),
-        '-app_base', self.root_path,
-        '-arch', self.arch,
-        '-binary_name', GO_APP_NAME,
-        '-dynamic',
-        '-goroot', self.goroot,
-        '-unsafe',
-        '-work_dir', GAB_WORK_DIR,
-    ]
-    if 'GOPATH' in os.environ:
-      argv.extend(['-gopath', os.environ['GOPATH']])
-    return argv
-
-  def build(self, go_files):
-    logging.info('building ' + GO_APP_NAME)
-    if not os.path.exists(GAB_WORK_DIR):
-      os.makedirs(GAB_WORK_DIR)
-    gab_argv = self._gab_args() + go_files
-    try:
-      p = subprocess.Popen(gab_argv, stdout=subprocess.PIPE,
-                           stderr=subprocess.PIPE, env={})
-      gab_retcode = p.wait()
-    except Exception, e:
-      raise Exception('cannot call go-app-builder', e)
-    if gab_retcode != 0:
-      raise dev_appserver.CompileError(p.stdout.read() + '\n' + p.stderr.read())
-
-  def extras_hash(self, go_files):
-    logging.info('checking extra files')
-    gab_argv = self._gab_args() + ['-print_extras_hash'] + go_files
-    try:
-      p = subprocess.Popen(gab_argv, stdout=subprocess.PIPE,
-                           stderr=subprocess.PIPE, env={})
-      gab_retcode = p.wait()
-    except Exception, e:
-      raise Exception('cannot call go-app-builder', e)
-    if gab_retcode != 0:
-      raise dev_appserver.CompileError(p.stderr.read())
-    return p.stdout.read()
-
-
-OldSigTermHandler = None
-
-def SigTermHandler(signum, frame):
-  if GO_APP:
-    GO_APP.cleanup()
-  if OldSigTermHandler:
-    OldSigTermHandler(signum, frame)
-
-def execute_go_cgi(root_path, config, handler_path, cgi_path,
-                   env, infile, outfile):
-
-  global RAPI_HANDLER, GAB_WORK_DIR, GO_APP, GO_HTTP_PORT, GO_API_PORT
-  global OldSigTermHandler
-  if not RAPI_HANDLER:
-    GAB_WORK_DIR = gab_work_dir(config, getpass.getuser(), env['SERVER_PORT'])
-    GO_HTTP_PORT = pick_unused_port()
-    GO_API_PORT = pick_unused_port()
-    atexit.register(cleanup)
-    try:
-
-
-
-
-
-
-
-
-
-      OldSigTermHandler = signal.signal(signal.SIGTERM, SigTermHandler)
-    except ValueError:
-
-
-
-
-
-
-
-
-
-
-
-
-      pass
-    DelegateServer()
-    RAPI_HANDLER = handler.ApiCallHandler()
-    GO_APP = GoApp(root_path)
-  GO_APP.make_and_run(env)
-
-
-  request_method = env['REQUEST_METHOD']
-  server_protocol = env['SERVER_PROTOCOL']
-  request_uri = env['PATH_INFO']
-  if env.get('QUERY_STRING'):
-    request_uri += '?' + env['QUERY_STRING']
-  content = infile.getvalue()
-  headers = []
-  for k, v in env.items():
-    if k in HEADER_MAP:
-      headers.append('%s: %s' % (HEADER_MAP[k], v))
-    elif k.startswith('HTTP_'):
-      hk = k[5:].replace("_", "-")
-      if hk.title() == 'Connection':
-        continue
-      headers.append('%s: %s' % (hk, v))
-
-  headers.append('Content-Length: %d' % len(content))
-  headers.append('Connection: close')
-  http_req = (request_method + ' ' + request_uri + ' ' + server_protocol +
-      '\r\n' + '\r\n'.join(headers) + '\r\n\r\n' + content)
-
-
-
-
-
-  old_env = os.environ.copy()
-  try:
-    os.environ.clear()
-    os.environ.update(env)
-
-
-    x = DelegateClient(http_req)
-    while not x.closed:
-      asyncore.loop(30.0, False, None, 1)
-    res = x.result
-  finally:
-    os.environ.clear()
-    os.environ.update(old_env)
-
-
-
-  if res.startswith('HTTP/1.0 ') or res.startswith('HTTP/1.1 '):
-    res = 'Status:' + res[8:]
-  else:
-    res = INTERNAL_SERVER_ERROR
-  outfile.write(res)
diff --git a/google/appengine/ext/mapreduce/base_handler.py b/google/appengine/ext/mapreduce/base_handler.py
index 9d99972..ad790db 100644
--- a/google/appengine/ext/mapreduce/base_handler.py
+++ b/google/appengine/ext/mapreduce/base_handler.py
@@ -40,10 +40,11 @@
 import simplejson
 
 import google
+
 try:
-  from appengine_pipeline.src import pipeline
+  from google.appengine.ext.mapreduce import pipeline_base
 except ImportError:
-  pipeline = None
+  pipeline_base = None
 from google.appengine.ext import webapp
 from google.appengine.ext.mapreduce import errors
 from google.appengine.ext.mapreduce import model
@@ -204,21 +205,8 @@
     self.request = self._RequestWrapper(self.request)
 
 
+if pipeline_base:
 
-_DEFAULT_BASE_PATH = "/_ah/mapreduce"
-_DEFAULT_PIPELINE_BASE_PATH = _DEFAULT_BASE_PATH + "/pipeline"
-
-
-if pipeline:
-  class PipelineBase(pipeline.Pipeline):
-    """Base class for all pipelines within mapreduce framework.
-
-    Rewrites base path to use pipeline library bundled with mapreduce.
-    """
-
-    def start(self, **kwargs):
-      if "base_path" not in kwargs:
-        kwargs["base_path"] = _DEFAULT_PIPELINE_BASE_PATH
-      return pipeline.Pipeline.start(self, **kwargs)
+  PipelineBase = pipeline_base.PipelineBase
 else:
   PipelineBase = None
diff --git a/google/appengine/ext/mapreduce/control.py b/google/appengine/ext/mapreduce/control.py
index 28312ab..d6f33c0 100644
--- a/google/appengine/ext/mapreduce/control.py
+++ b/google/appengine/ext/mapreduce/control.py
@@ -40,9 +40,9 @@
 import logging
 import google
 
-from google.appengine.ext.mapreduce import base_handler
 from google.appengine.ext.mapreduce import handlers
 from google.appengine.ext.mapreduce import model
+from google.appengine.ext.mapreduce import parameters
 
 
 _DEFAULT_SHARD_COUNT = 8
@@ -97,7 +97,7 @@
   if not shard_count:
     shard_count = _DEFAULT_SHARD_COUNT
   if base_path is None:
-    base_path = base_handler._DEFAULT_BASE_PATH
+    base_path = parameters._DEFAULT_BASE_PATH
 
   if mapper_parameters:
     mapper_parameters = dict(mapper_parameters)
diff --git a/google/appengine/ext/mapreduce/handlers.py b/google/appengine/ext/mapreduce/handlers.py
index 067ba43..56d8215 100644
--- a/google/appengine/ext/mapreduce/handlers.py
+++ b/google/appengine/ext/mapreduce/handlers.py
@@ -122,6 +122,9 @@
 class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler):
   """Callback handler for mapreduce worker task."""
 
+  _TASK_STATE = util._enum(RETRY_TASK="retry_task",
+                           DROP_TASK="drop_task")
+
   def __init__(self, *args):
     """Constructor."""
     super(MapperWorkerCallbackHandler, self).__init__(*args)
@@ -138,8 +141,8 @@
       tstate: model.TransientShardState from taskqueue payload.
 
     Returns:
-      True if lease is acquired. None if this task should be retried.
-    False if this task should be dropped. Only old tasks
+      A fresh shard state entity if lease is acquired. A _TASK_STATE
+    enum if this task should be retried or dropped. Only old tasks
    (compared to the datastore state) will be dropped. Future tasks are
    retried until they naturally become old, so the MR never gets stuck.
     """
@@ -148,13 +151,13 @@
       logging.warning("State not found for shard %s; Possible spurious task "
                       "execution. Dropping this task.",
                       tstate.shard_id)
-      return False
+      return self._TASK_STATE.DROP_TASK
 
     if not shard_state.active:
       logging.warning("Shard %s is not active. Possible spurious task "
                       "execution. Dropping this task.", tstate.shard_id)
       logging.warning(str(shard_state))
-      return False
+      return self._TASK_STATE.DROP_TASK
 
 
     if shard_state.retries > tstate.retries:
@@ -164,7 +167,7 @@
           tstate.shard_id,
           tstate.retries)
       logging.warning(str(shard_state))
-      return False
+      return self._TASK_STATE.DROP_TASK
     elif shard_state.retries < tstate.retries:
 
 
@@ -172,7 +175,7 @@
       logging.warning(
           "ShardState for %s is behind slice. Waiting for it to catch up",
           shard_state.shard_id)
-      return
+      return self._TASK_STATE.RETRY_TASK
 
 
 
@@ -180,7 +183,7 @@
       logging.warning(
           "Task %s-%s is behind ShardState %s. Dropping task.""",
           tstate.shard_id, tstate.slice_id, shard_state.slice_id)
-      return False
+      return self._TASK_STATE.DROP_TASK
 
 
 
@@ -188,7 +191,7 @@
       logging.warning(
           "Task %s-%s is ahead of ShardState %s. Waiting for it to catch up.",
           tstate.shard_id, tstate.slice_id, shard_state.slice_id)
-      return
+      return self._TASK_STATE.RETRY_TASK
 
 
 
@@ -204,16 +207,21 @@
 
 
         time.sleep(countdown)
-        return
+        return self._TASK_STATE.RETRY_TASK
 
       else:
-        if (not self._old_request_ended(shard_state) and
-            self._wait_time(shard_state, _REQUEST_EVENTUAL_TIMEOUT)):
+        if self._wait_time(shard_state, _REQUEST_EVENTUAL_TIMEOUT):
+          if not self._old_request_ended(shard_state):
+            logging.warning(
+                "Last retry of slice %s-%s is still in flight with request_id "
+                "%s. Will try again later.", tstate.shard_id, tstate.slice_id,
+                shard_state.slice_request_id)
+            return self._TASK_STATE.RETRY_TASK
+        else:
           logging.warning(
-              "Last retry of slice %s-%s is still in flight with request_id "
-              "%s. Will try again later.", tstate.shard_id, tstate.slice_id,
-              shard_state.slice_request_id)
-          return
+              "Last retry of slice %s-%s has no log entry and has"
+              "timed out after %s seconds",
+              tstate.shard_id, tstate.slice_id, _REQUEST_EVENTUAL_TIMEOUT)
 
 
     config = util.create_datastore_write_config(tstate.mapreduce_spec)
@@ -225,7 +233,7 @@
       the previous validation code). The task would die naturally eventually.
 
       Returns:
-        True if state commit succeeded. None otherwise.
+        Fresh shard state if state commit succeeded; otherwise a
+        _TASK_STATE enum or None.
       """
       fresh_state = model.ShardState.get_by_shard_id(tstate.shard_id)
       if not fresh_state:
@@ -236,15 +244,16 @@
           fresh_state.slice_start_time == shard_state.slice_start_time):
         fresh_state.slice_start_time = datetime.datetime.now()
         fresh_state.slice_request_id = os.environ.get("REQUEST_LOG_ID")
+        fresh_state.acquired_once = True
         fresh_state.put(config=config)
-        return True
+        return fresh_state
       else:
         logging.warning(
             "Contention on slice %s-%s execution. Will retry again.",
             tstate.shard_id, tstate.slice_id)
 
         time.sleep(random.randrange(1, 5))
-        return
+        return self._TASK_STATE.RETRY_TASK
 
     return _tx()
 
@@ -327,9 +336,7 @@
     @db.transactional
     def _tx():
       fresh_state = model.ShardState.get_by_shard_id(shard_state.shard_id)
-      if (fresh_state and
-          fresh_state.active and
-          fresh_state.slice_id == shard_state.slice_id):
+      if fresh_state and fresh_state.active:
 
         fresh_state.slice_start_time = None
         fresh_state.slice_request_id = None
@@ -342,8 +349,8 @@
     except Exception, e:
       logging.warning(e)
       logging.warning(
-          "Release lock for slice %s-%s failed. Wait for lease to expire.",
-          shard_state.shard_id, shard_state.slice_id)
+          "Release lock for shard %s failed. Wait for lease to expire.",
+          shard_state.shard_id)
 
   def handle(self):
     """Handle request."""
@@ -356,11 +363,11 @@
         model.MapreduceControl.get_key_by_job_id(spec.mapreduce_id),
     ])
 
-    lease_acquired = self._try_acquire_lease(shard_state, tstate)
-    if lease_acquired is None:
+    shard_state = self._try_acquire_lease(shard_state, tstate)
+    if shard_state == self._TASK_STATE.RETRY_TASK:
       self.retry_task()
       return
-    if not lease_acquired:
+    if shard_state == self._TASK_STATE.DROP_TASK:
       return
 
     ctx = context.Context(spec, shard_state,
@@ -371,8 +378,7 @@
                    shard_state.shard_number, shard_state.mapreduce_id)
 
 
-      shard_state.active = False
-      shard_state.result_status = model.ShardState.RESULT_ABORTED
+      shard_state.set_for_abort()
       shard_state.put(config=util.create_datastore_write_config(spec))
       return
 
@@ -466,8 +472,7 @@
     ctx.flush()
 
     if finished_shard:
-      shard_state.active = False
-      shard_state.result_status = model.ShardState.RESULT_SUCCESS
+      shard_state.set_for_success()
 
   def process_data(self, data, input_reader, ctx, transient_shard_state):
     """Process a single data piece.
@@ -573,7 +578,6 @@
           tstate.shard_id,
           tstate.slice_id,
           self.task_retry_count() + 1)
-      shard_state.slice_id -= 1
       self._try_free_lease(shard_state)
       raise e
     finally:
@@ -1364,7 +1368,11 @@
     finally:
       context.Context._set(None)
 
-    if not transactional:
+
+
+
+    @db.non_transactional
+    def _txn():
 
       state = model.MapreduceState.create_new(mapreduce_spec.mapreduce_id)
       state.mapreduce_spec = mapreduce_spec
@@ -1372,8 +1380,11 @@
       state.active_shards = mapper_spec.shard_count
       if _app:
         state.app_id = _app
-      config = util.create_datastore_write_config(mapreduce_spec)
-      state.put(config=config)
+      state.put(config=util.create_datastore_write_config(mapreduce_spec))
+      return state
+
+    state = _txn()
+    if not transactional:
       parent_entity = state
 
     cls._add_kickoff_task(
diff --git a/google/appengine/ext/mapreduce/input_readers.py b/google/appengine/ext/mapreduce/input_readers.py
index 0ddae71..014d5ca 100644
--- a/google/appengine/ext/mapreduce/input_readers.py
+++ b/google/appengine/ext/mapreduce/input_readers.py
@@ -40,9 +40,9 @@
     "BlobstoreLineInputReader",
     "BlobstoreZipInputReader",
     "BlobstoreZipLineInputReader",
+    "ConsistentKeyReader",
     "COUNTER_IO_READ_BYTES",
     "COUNTER_IO_READ_MSEC",
-    "ConsistentKeyReader",
     "DatastoreEntityInputReader",
     "DatastoreInputReader",
     "DatastoreKeyInputReader",
@@ -186,7 +186,7 @@
         parameters to define the behavior of input readers.
 
     Returns:
-      A list of InputReaders. None when no input data can be found.
+      A list of InputReaders. None or [] when no input data can be found.
     """
     raise NotImplementedError("split_input() not implemented in %s" % cls)
 
@@ -343,6 +343,8 @@
                                                params[cls.FORMAT_PARAM],
                                                mapper_spec.shard_count)
 
+    if file_format_roots is None:
+      return []
     return [cls(root) for root in file_format_roots]
 
   @classmethod
@@ -1982,196 +1984,6 @@
     return {self.COUNT: self._count, self.STRING_LENGTH: self._string_length}
 
 
-class ConsistentKeyReader(_OldAbstractDatastoreInputReader):
-  """A key reader which reads consistent data from datastore.
-
-  Datastore might have entities which were written, but not visible through
-  queries for some time. Typically these entities can be only read inside
-  transaction until they are 'applied'.
-
-  This reader reads all keys even if they are not visible. It might take
-  significant time to start yielding some data because it has to apply all
-  modifications created before its start.
-  """
-  START_TIME_US_PARAM = "start_time_us"
-  UNAPPLIED_LOG_FILTER = "__unapplied_log_timestamp_us__ <"
-  DUMMY_KIND = "DUMMY_KIND"
-  DUMMY_ID = 106275677020293L
-  UNAPPLIED_QUERY_DEADLINE = 270
-
-  def _get_unapplied_jobs_accross_namespaces(self,
-                                             namespace_start,
-                                             namespace_end,
-                                             app):
-    filters = {"__key__ >=": db.Key.from_path("__namespace__",
-                                              namespace_start or 1,
-                                              _app=app),
-               "__key__ <=": db.Key.from_path("__namespace__",
-                                              namespace_end or 1,
-                                              _app=app),
-               self.UNAPPLIED_LOG_FILTER: self.start_time_us}
-    unapplied_query = datastore.Query(filters=filters, keys_only=True, _app=app)
-    return unapplied_query.Get(
-        limit=self._batch_size,
-        config=datastore_rpc.Configuration(
-            deadline=self.UNAPPLIED_QUERY_DEADLINE))
-
-  def _iter_ns_range(self):
-    while True:
-      unapplied_jobs = self._get_unapplied_jobs_accross_namespaces(
-          self._ns_range.namespace_start,
-          self._ns_range.namespace_end,
-          self._ns_range.app)
-
-      if not unapplied_jobs:
-        break
-
-      self._apply_jobs(unapplied_jobs)
-
-    for o in super(ConsistentKeyReader, self)._iter_ns_range():
-      yield o
-
-  def _iter_key_range(self, k_range):
-    assert hasattr(self, "start_time_us"), "start_time_us property was not set"
-    if self._ns_range is None:
-
-
-      self._apply_key_range(k_range)
-
-    raw_entity_kind = self._get_raw_entity_kind(self._entity_kind)
-    query = k_range.make_ascending_datastore_query(
-        raw_entity_kind, keys_only=True, filters=self._filters)
-    for key in query.Run(
-        config=datastore_query.QueryOptions(batch_size=self._batch_size)):
-      yield key, key
-
-  def _apply_key_range(self, k_range):
-    """Apply all jobs in the given KeyRange."""
-
-
-
-
-
-    apply_range = copy.deepcopy(k_range)
-    while True:
-
-
-
-      unapplied_query = self._make_unapplied_query(apply_range)
-      unapplied_jobs = unapplied_query.Get(
-          limit=self._batch_size,
-          config=datastore_rpc.Configuration(
-              deadline=self.UNAPPLIED_QUERY_DEADLINE))
-      if not unapplied_jobs:
-        break
-      self._apply_jobs(unapplied_jobs)
-
-
-      apply_range.advance(unapplied_jobs[-1])
-
-  def _make_unapplied_query(self, k_range):
-    """Returns a datastore.Query that finds the unapplied keys in k_range."""
-    unapplied_query = k_range.make_ascending_datastore_query(
-        kind=None, keys_only=True)
-    unapplied_query[
-        ConsistentKeyReader.UNAPPLIED_LOG_FILTER] = self.start_time_us
-    return unapplied_query
-
-  def _apply_jobs(self, unapplied_jobs):
-    """Apply all jobs implied by the given keys."""
-
-    keys_to_apply = []
-    for key in unapplied_jobs:
-
-
-      path = key.to_path() + [ConsistentKeyReader.DUMMY_KIND,
-                              ConsistentKeyReader.DUMMY_ID]
-      keys_to_apply.append(
-          db.Key.from_path(_app=key.app(), namespace=key.namespace(), *path))
-    db.get(keys_to_apply, config=datastore_rpc.Configuration(
-        deadline=self.UNAPPLIED_QUERY_DEADLINE,
-        read_policy=datastore_rpc.Configuration.APPLY_ALL_JOBS_CONSISTENCY))
-
-  @classmethod
-  def _split_input_from_namespace(cls,
-                                  app,
-                                  namespace,
-                                  entity_kind_name,
-                                  shard_count):
-    key_ranges = super(ConsistentKeyReader, cls)._split_input_from_namespace(
-        app, namespace, entity_kind_name, shard_count)
-    assert len(key_ranges) == shard_count
-
-
-
-
-    try:
-      last_key_range_index = key_ranges.index(None) - 1
-    except ValueError:
-      last_key_range_index = shard_count - 1
-
-    if last_key_range_index != -1:
-      key_ranges[0].key_start = None
-      key_ranges[0].include_start = False
-      key_ranges[last_key_range_index].key_end = None
-      key_ranges[last_key_range_index].include_end = False
-    return key_ranges
-
-  @classmethod
-  def _split_input_from_params(cls, app, namespaces, entity_kind_name,
-                               params, shard_count):
-    readers = super(ConsistentKeyReader, cls)._split_input_from_params(
-        app,
-        namespaces,
-        entity_kind_name,
-        params,
-        shard_count)
-
-
-
-    if not readers:
-      readers = [cls(entity_kind_name,
-                     key_ranges=None,
-                     ns_range=namespace_range.NamespaceRange(),
-                     batch_size=shard_count)]
-
-    return readers
-
-  @classmethod
-  def split_input(cls, mapper_spec):
-    """Splits input into key ranges."""
-    readers = super(ConsistentKeyReader, cls).split_input(mapper_spec)
-    start_time_us = _get_params(mapper_spec).get(
-        cls.START_TIME_US_PARAM, long(time.time() * 1e6))
-    for reader in readers:
-      reader.start_time_us = start_time_us
-    return readers
-
-  def to_json(self):
-    """Serializes all the data in this reader into json form.
-
-    Returns:
-      all the data in json-compatible map.
-    """
-    json_dict = super(ConsistentKeyReader, self).to_json()
-    json_dict[self.START_TIME_US_PARAM] = self.start_time_us
-    return json_dict
-
-  @classmethod
-  def from_json(cls, json):
-    """Create new ConsistentKeyReader from the json, encoded by to_json.
-
-    Args:
-      json: json map representation of ConsistentKeyReader.
-
-    Returns:
-      an instance of ConsistentKeyReader with all data deserialized from json.
-    """
-    reader = super(ConsistentKeyReader, cls).from_json(json)
-    reader.start_time_us = json[cls.START_TIME_US_PARAM]
-    return reader
-
-
 
 
 
@@ -2845,3 +2657,8 @@
       except EOFError:
         self._cur_handle = None
         self._record_reader = None
+
+
+
+
+ConsistentKeyReader = DatastoreKeyInputReader
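
ConsistentKeyReader's implementation is removed above; the assignment keeps the name importable as a backward-compatibility alias for DatastoreKeyInputReader, so existing mapper configurations that reference it keep working. A quick illustration of what the alias means in practice (a sketch, not SDK code):

from google.appengine.ext.mapreduce import input_readers

# The old name now resolves to the plain key reader.
assert (input_readers.ConsistentKeyReader
        is input_readers.DatastoreKeyInputReader)
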
diff --git a/google/appengine/ext/mapreduce/mapper_pipeline.py b/google/appengine/ext/mapreduce/mapper_pipeline.py
index 9455756..2b91118 100644
--- a/google/appengine/ext/mapreduce/mapper_pipeline.py
+++ b/google/appengine/ext/mapreduce/mapper_pipeline.py
@@ -40,12 +40,17 @@
 import google
 
 from google.appengine.api import files
-from google.appengine.ext.mapreduce import base_handler
 from google.appengine.ext.mapreduce import control
 from google.appengine.ext.mapreduce import model
+from google.appengine.ext.mapreduce import parameters
+from google.appengine.ext.mapreduce import pipeline_base
 
 
-class MapperPipeline(base_handler.PipelineBase):
+
+
+
+class MapperPipeline(pipeline_base._OutputSlotsMixin,
+                     pipeline_base.PipelineBase):
   """Pipeline wrapper for mapper job.
 
   Args:
@@ -57,7 +62,11 @@
     shards: number of shards in the job as int.
 
   Returns:
-    The list of filenames mapper was outputting to.
+    default: the list of filenames produced by the mapper if there was any
+      output and the map was completed successfully.
+    result_status: one of model.MapreduceState._RESULTS.
+    job_id: mr id that can be used to query model.MapreduceState. Available
+      immediately after run returns.
   """
   async = True
 
@@ -68,8 +77,7 @@
 
       "job_id",
 
-      "counters",
-      ]
+      "counters"] + pipeline_base._OutputSlotsMixin.output_names
 
   def run(self,
           job_name,
@@ -93,22 +101,24 @@
         )
     self.fill(self.outputs.job_id, mapreduce_id)
     self.set_status(console_url="%s/detail?job_id=%s" % (
-        (base_handler._DEFAULT_BASE_PATH, mapreduce_id)))
+        (parameters._DEFAULT_BASE_PATH, mapreduce_id)))
 
   def callback(self):
     mapreduce_id = self.outputs.job_id.value
     mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
     mapper_spec = mapreduce_state.mapreduce_spec.mapper
-    files = None
+    outputs = []
     output_writer_class = mapper_spec.output_writer_class()
-    if output_writer_class:
-      files = output_writer_class.get_filenames(mapreduce_state)
+    if (output_writer_class and
+        mapreduce_state.result_status == model.MapreduceState.RESULT_SUCCESS):
+      outputs = output_writer_class.get_filenames(mapreduce_state)
 
+    self.fill(self.outputs.result_status, mapreduce_state.result_status)
     self.fill(self.outputs.counters, mapreduce_state.counters_map.to_dict())
-    self.complete(files)
+    self.complete(outputs)
 
 
-class _CleanupPipeline(base_handler.PipelineBase):
+class _CleanupPipeline(pipeline_base.PipelineBase):
   """A pipeline to do a cleanup for mapreduce jobs.
 
   Args:
diff --git a/google/appengine/ext/mapreduce/mapreduce_pipeline.py b/google/appengine/ext/mapreduce/mapreduce_pipeline.py
index eaf6a28..dffbb8e 100644
--- a/google/appengine/ext/mapreduce/mapreduce_pipeline.py
+++ b/google/appengine/ext/mapreduce/mapreduce_pipeline.py
@@ -53,19 +53,22 @@
 from appengine_pipeline.src.pipeline import common as pipeline_common
 from google.appengine.api import files
 from google.appengine.api.files import file_service_pb
-from google.appengine.ext.mapreduce import base_handler
 from google.appengine.ext.mapreduce import context
 from google.appengine.ext.mapreduce import errors
 from google.appengine.ext.mapreduce import input_readers
 from google.appengine.ext.mapreduce import mapper_pipeline
+from google.appengine.ext.mapreduce import model
 from google.appengine.ext.mapreduce import operation
 from google.appengine.ext.mapreduce import output_writers
+from google.appengine.ext.mapreduce import pipeline_base
 from google.appengine.ext.mapreduce import shuffler
 from google.appengine.ext.mapreduce import util
 
 
 
 
+
+
 MapperPipeline = mapper_pipeline.MapperPipeline
 
 ShufflePipeline = shuffler.ShufflePipeline
@@ -73,7 +76,8 @@
 CleanupPipeline = mapper_pipeline._CleanupPipeline
 
 
-class MapPipeline(base_handler.PipelineBase):
+class MapPipeline(pipeline_base._OutputSlotsMixin,
+                  pipeline_base.PipelineBase):
   """Runs the map stage of MapReduce.
 
   Iterates over input reader and outputs data into key/value format
@@ -208,7 +212,8 @@
     return result
 
 
-class ReducePipeline(base_handler.PipelineBase):
+class ReducePipeline(pipeline_base._OutputSlotsMixin,
+                     pipeline_base.PipelineBase):
   """Runs the reduce stage of MapReduce.
 
   Merge-reads input files and runs reducer function on them.
@@ -263,7 +268,8 @@
         shards=shards)
 
 
-class MapreducePipeline(base_handler.PipelineBase):
+class MapreducePipeline(pipeline_base._OutputSlotsMixin,
+                        pipeline_base.PipelineBase):
   """Pipeline to execute MapReduce jobs.
 
   Args:
@@ -283,7 +289,10 @@
       same as the input key.
 
   Returns:
-    filenames from output writer.
+    result_status: one of model.MapreduceState._RESULTS. Check this to see
+      if the job is successful.
+    default: a list of filenames if the mapreduce was successful and
+      was outputting files. An empty list otherwise.
   """
 
   def run(self,
@@ -314,4 +323,37 @@
       all_temp_files = yield pipeline_common.Extend(
           map_pipeline, shuffler_pipeline)
       yield CleanupPipeline(all_temp_files)
-    yield pipeline_common.Return(reducer_pipeline)
+
+    yield _ReturnPipeline(map_pipeline.result_status,
+                          reducer_pipeline.result_status,
+                          reducer_pipeline)
+
+
+class _ReturnPipeline(pipeline_base._OutputSlotsMixin,
+                      pipeline_base.PipelineBase):
+  """Returns Mapreduce result.
+
+  Fills outputs for MapreducePipeline. See MapreducePipeline.
+  """
+
+  output_names = ["result_status"]
+
+  def run(self,
+          map_result_status,
+          reduce_result_status,
+          reduce_outputs):
+
+    if (map_result_status == model.MapreduceState.RESULT_ABORTED or
+        reduce_result_status == model.MapreduceState.RESULT_ABORTED):
+      result_status = model.MapreduceState.RESULT_ABORTED
+    elif (map_result_status == model.MapreduceState.RESULT_FAILED or
+          reduce_result_status == model.MapreduceState.RESULT_FAILED):
+      result_status = model.MapreduceState.RESULT_FAILED
+    else:
+      result_status = model.MapreduceState.RESULT_SUCCESS
+
+    self.fill(self.outputs.result_status, result_status)
+    if result_status == model.MapreduceState.RESULT_SUCCESS:
+      yield pipeline_common.Return(reduce_outputs)
+    else:
+      yield pipeline_common.Return([])
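
MapreducePipeline now fills a result_status output slot and only returns filenames when both stages succeed. A hedged usage sketch; the job name, mapper/reducer paths, and parameters below are illustrative placeholders:

from google.appengine.ext.mapreduce import mapreduce_pipeline
from google.appengine.ext.mapreduce import model

blob_key = 'my-uploaded-zip'  # hypothetical blob key
job = mapreduce_pipeline.MapreducePipeline(
    'word_count',                          # illustrative job name
    'main.word_count_map',                 # hypothetical mapper
    'main.word_count_reduce',              # hypothetical reducer
    'mapreduce.input_readers.BlobstoreZipInputReader',
    'mapreduce.output_writers.BlobstoreOutputWriter',
    mapper_params={'blob_key': blob_key},
    shards=16)
job.start()

# After completion (e.g. when the job is looked up again by pipeline id):
if job.outputs.result_status.value == model.MapreduceState.RESULT_SUCCESS:
  filenames = job.outputs.default.value    # output files
else:
  filenames = []                           # aborted/failed runs yield []
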
diff --git a/google/appengine/ext/mapreduce/model.py b/google/appengine/ext/mapreduce/model.py
index 122cee2..1dea850 100644
--- a/google/appengine/ext/mapreduce/model.py
+++ b/google/appengine/ext/mapreduce/model.py
@@ -167,7 +167,7 @@
     elif len(compressed_payload) > self.MAX_DB_PAYLOAD:
       raise ValueError(
           "Payload from %s to big to be stored in database: %s" %
-          self.name, len(compressed_payload))
+          (self.name, len(compressed_payload)))
 
     else:
       if not parent:
@@ -1026,11 +1026,12 @@
 
   Properties:
     active: if we have this shard still running as boolean.
-    counters_map: shard's counters map as CountersMap. Mirrors
-      counters_map_json.
+    counters_map: shard's counters map as CountersMap. All counters yielded
+      within mapreduce are stored here.
     mapreduce_id: unique id of the mapreduce.
     shard_id: unique id of this shard as string.
     shard_number: ordered number for this shard.
+    retries: the number of times this shard has been retried.
     result_status: If not None, the final status of this shard.
     update_time: The last time this shard state was updated.
     shard_description: A string description of the work this shard will do.
@@ -1055,9 +1056,16 @@
       at all or doesn't log the end of a request. So a new request can
       proceed after a long conservative timeout.
     slice_retries: the number of times a slice has been retried due to
-      data processing error (non taskqueue/datastore). This count is
+      processing data while the lock is held. Taskqueue/datastore errors
+      related to shard management are not counted. This count is
      only a lower bound and is used to determine when to fail a slice
       completely.
+    acquired_once: whether the lock for this slice has been acquired at
+      least once. When this is True, duplicates in outputs are possible.
+      This is distinct from slice_retries being 0: for example, when
+      outputs have been written but a taskqueue problem prevents a slice
+      from continuing, acquired_once would be True while slice_retries
+      would still be 0.
   """
 
   RESULT_SUCCESS = "success"
@@ -1078,6 +1086,7 @@
   slice_start_time = db.DateTimeProperty(indexed=False)
   slice_request_id = db.ByteStringProperty(indexed=False)
   slice_retries = db.IntegerProperty(default=0, indexed=False)
+  acquired_once = db.BooleanProperty(default=False, indexed=False)
 
 
   mapreduce_id = db.StringProperty(required=True)
@@ -1100,6 +1109,8 @@
       kv["slice_retries"] = self.slice_retries
     if self.slice_request_id:
       kv["slice_request_id"] = self.slice_request_id
+    if self.acquired_once:
+      kv["acquired_once"] = self.acquired_once
     keys = kv.keys()
     keys.sort()
 
@@ -1120,6 +1131,7 @@
     self.slice_start_time = None
     self.slice_request_id = None
     self.slice_retries = 0
+    self.acquired_once = False
 
   def advance_for_next_slice(self):
     """Advance self for next slice."""
@@ -1127,6 +1139,19 @@
     self.slice_start_time = None
     self.slice_request_id = None
     self.slice_retries = 0
+    self.acquired_once = False
+
+  def set_for_abort(self):
+    self.active = False
+    self.result_status = self.RESULT_ABORTED
+
+  def set_for_success(self):
+    self.active = False
+    self.result_status = self.RESULT_SUCCESS
+    self.slice_start_time = None
+    self.slice_request_id = None
+    self.slice_retries = 0
+    self.acquired_once = False
 
   def copy_from(self, other_state):
     """Copy data from another shard state entity to self."""
diff --git a/google/appengine/ext/mapreduce/parameters.py b/google/appengine/ext/mapreduce/parameters.py
index f7e6547..22a06e5 100644
--- a/google/appengine/ext/mapreduce/parameters.py
+++ b/google/appengine/ext/mapreduce/parameters.py
@@ -20,3 +20,7 @@
 __all__ = []
 
 DEFAULT_SHARD_RETRY_LIMIT = 3
+
+
+_DEFAULT_BASE_PATH = "/_ah/mapreduce"
+_DEFAULT_PIPELINE_BASE_PATH = _DEFAULT_BASE_PATH + "/pipeline"
diff --git a/google/appengine/ext/mapreduce/pipeline_base.py b/google/appengine/ext/mapreduce/pipeline_base.py
new file mode 100644
index 0000000..e5f139a
--- /dev/null
+++ b/google/appengine/ext/mapreduce/pipeline_base.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Base pipelines."""
+
+import google
+
+from appengine_pipeline.src import pipeline
+
+from google.appengine.ext.mapreduce import parameters
+
+
+
+
+class PipelineBase(pipeline.Pipeline):
+  """Base class for all pipelines within mapreduce framework.
+
+  Rewrites base path to use pipeline library bundled with mapreduce.
+  """
+
+  def start(self, **kwargs):
+    if "base_path" not in kwargs:
+      kwargs["base_path"] = parameters._DEFAULT_PIPELINE_BASE_PATH
+    return pipeline.Pipeline.start(self, **kwargs)
+
+
+class _OutputSlotsMixin(object):
+  """Defines common output slots for all MR user facing pipelines.
+
+  result_status: one of model.MapreduceState._RESULTS. When a MR pipeline
+    finishes, users should check this for the status of the MR job.
+  """
+
+  output_names = ["result_status"]
diff --git a/google/appengine/ext/mapreduce/shuffler.py b/google/appengine/ext/mapreduce/shuffler.py
index fedd162..4e25a23 100644
--- a/google/appengine/ext/mapreduce/shuffler.py
+++ b/google/appengine/ext/mapreduce/shuffler.py
@@ -52,13 +52,15 @@
 from google.appengine.api.files import file_service_pb
 from google.appengine.api.files import records
 from google.appengine.ext import db
-from google.appengine.ext.mapreduce import base_handler
 from google.appengine.ext.mapreduce import context
 from google.appengine.ext.mapreduce import errors
 from google.appengine.ext.mapreduce import input_readers
 from google.appengine.ext.mapreduce import mapper_pipeline
 from google.appengine.ext.mapreduce import operation
 from google.appengine.ext.mapreduce import output_writers
+from google.appengine.ext.mapreduce import pipeline_base
+
+
 
 
 class _OutputFile(db.Model):
@@ -154,7 +156,7 @@
   entity.put()
 
 
-class _SortChunksPipeline(base_handler.PipelineBase):
+class _SortChunksPipeline(pipeline_base.PipelineBase):
   """A pipeline to sort multiple key-value files.
 
   Args:
@@ -189,7 +191,7 @@
       yield pipeline_common.Return(result)
 
 
-class _CollectOutputFiles(base_handler.PipelineBase):
+class _CollectOutputFiles(pipeline_base.PipelineBase):
   """Collect output file names from _OutputFile entities for given jobs.
 
   Args:
@@ -207,7 +209,7 @@
     return result
 
 
-class _CleanupOutputFiles(base_handler.PipelineBase):
+class _CleanupOutputFiles(pipeline_base.PipelineBase):
   """Cleanup _OutputFile entities for given job ids.
 
   Args:
@@ -510,7 +512,7 @@
     ctx.get_pool(pool_name).append(proto.Encode())
 
 
-class _ShardOutputs(base_handler.PipelineBase):
+class _ShardOutputs(pipeline_base.PipelineBase):
   """Takes a flat list of filenames, returns a list of lists, each with
   one member each.
   """
@@ -539,7 +541,7 @@
   yield proto.Encode()
 
 
-class _MergePipeline(base_handler.PipelineBase):
+class _MergePipeline(pipeline_base.PipelineBase):
   """Pipeline to merge sorted chunks.
 
   This pipeline merges together individually sorted chunks of each shard.
@@ -584,7 +586,7 @@
   yield (proto.key(), proto.value())
 
 
-class _HashPipeline(base_handler.PipelineBase):
+class _HashPipeline(pipeline_base.PipelineBase):
   """A pipeline to read mapper output and hash by key.
 
   Args:
@@ -611,14 +613,14 @@
             shards=shards)
 
 
-class _ShuffleServicePipeline(base_handler.PipelineBase):
+class _ShuffleServicePipeline(pipeline_base.PipelineBase):
   """A pipeline to invoke shuffle service.
 
   Args:
     input_files: list of file names to shuffle.
 
   Returns:
-    list of shuffled file names.
+    list of shuffled file names. Empty list if there is no input.
   """
   async = True
 
@@ -628,6 +630,17 @@
       ]
 
   def run(self, job_name, input_files):
+
+
+    empty = True
+    for filename in input_files:
+      if files.stat(filename).st_size > 0:
+        empty = False
+        break
+    if empty:
+      self.complete([])
+      return
+
     shard_number = len(input_files)
     output_files = []
     for i in range(shard_number):
@@ -666,7 +679,7 @@
     return True
 
 
-class ShufflePipeline(base_handler.PipelineBase):
+class ShufflePipeline(pipeline_base.PipelineBase):
   """A pipeline to shuffle multiple key-value files.
 
   Args:
@@ -678,10 +691,13 @@
       to the number of input files.
 
   Returns:
-    The list of filenames as string. Resulting files contain serialized
-    file_service_pb.KeyValues protocol messages with all values collated
-    to a single key.
+    default: a list of filenames as string. Resulting files contain
+      serialized file_service_pb.KeyValues protocol messages with
+      all values collated to a single key. When there is no output, the
+      shuffle service returns an empty list, while the in-memory
+      shuffler returns a list of empty files.
   """
+
   def run(self, job_name, filenames, shards=None):
     if files.shuffler.available():
       yield _ShuffleServicePipeline(job_name, filenames)
diff --git a/google/appengine/ext/mapreduce/status.py b/google/appengine/ext/mapreduce/status.py
index d94b1f8..9084683 100644
--- a/google/appengine/ext/mapreduce/status.py
+++ b/google/appengine/ext/mapreduce/status.py
@@ -34,6 +34,7 @@
 
 
 import os
+import pkgutil
 import time
 
 from google.appengine.api import validation
@@ -295,7 +296,11 @@
     path = os.path.join(os.path.dirname(__file__), "static", real_path)
     self.response.headers["Cache-Control"] = "public; max-age=300"
     self.response.headers["Content-Type"] = content_type
-    self.response.out.write(open(path).read())
+    try:
+      data = pkgutil.get_data(__name__, "static/" + real_path)
+    except AttributeError:
+      data = None
+    self.response.out.write(data or open(path).read())
 
 
 class ListConfigsHandler(base_handler.GetJsonHandler):
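
Loading the status UI's static files through pkgutil.get_data keeps them readable even when the mapreduce package ships inside a zip archive; the AttributeError fallback covers import loaders that predate get_data. The same pattern in isolation (a sketch):

import os
import pkgutil

def load_static(name):
  """Read a packaged static file, zip-safe where the loader allows it."""
  try:
    data = pkgutil.get_data(__name__, 'static/' + name)
  except AttributeError:
    data = None  # loader without get_data support
  if data is None:
    # Plain-filesystem fallback, mirroring the original open().read().
    path = os.path.join(os.path.dirname(__file__), 'static', name)
    data = open(path).read()
  return data
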
diff --git a/google/appengine/ext/mapreduce/util.py b/google/appengine/ext/mapreduce/util.py
index de17a2b..2303842 100644
--- a/google/appengine/ext/mapreduce/util.py
+++ b/google/appengine/ext/mapreduce/util.py
@@ -54,6 +54,11 @@
 from google.appengine.datastore import datastore_rpc
 
 
+def _enum(**enums):
+  """Helper to create enum."""
+  return type("Enum", (), enums)
+
+
 def total_seconds(td):
   """convert a timedelta to seconds.
 
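
The _enum helper builds a throwaway class whose attributes serve as string constants; it is how MapperWorkerCallbackHandler._TASK_STATE is defined earlier in this patch. A two-line illustration:

def _enum(**enums):
  return type('Enum', (), enums)

_TASK_STATE = _enum(RETRY_TASK='retry_task', DROP_TASK='drop_task')
assert _TASK_STATE.RETRY_TASK == 'retry_task'
assert _TASK_STATE.DROP_TASK == 'drop_task'
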
diff --git a/google/appengine/ext/ndb/__init__.py b/google/appengine/ext/ndb/__init__.py
index 38d6c97..300405b 100644
--- a/google/appengine/ext/ndb/__init__.py
+++ b/google/appengine/ext/ndb/__init__.py
@@ -1,6 +1,6 @@
 """NDB -- A new datastore API for the Google App Engine Python runtime."""
 
-__version__ = '1.0.9'
+__version__ = '1.0.10'
 
 __all__ = []
 
diff --git a/google/appengine/ext/ndb/context.py b/google/appengine/ext/ndb/context.py
index 56fd07c..f5ef435 100644
--- a/google/appengine/ext/ndb/context.py
+++ b/google/appengine/ext/ndb/context.py
@@ -794,7 +794,6 @@
       try:
         inq = tasklets.SerialQueueFuture()
         query.run_to_queue(inq, self._conn, options)
-        is_ancestor_query = query.ancestor is not None
         while True:
           try:
             batch, i, ent = yield inq.getq()
@@ -903,10 +902,10 @@
         adapter=parent._conn.adapter,
         config=parent._conn.config,
         transaction=transaction)
-      old_ds_conn = datastore._GetConnection()
       tctx = parent.__class__(conn=tconn,
                               auto_batcher_class=parent._auto_batcher_class,
                               parent_context=parent)
+      tctx._old_ds_conn = datastore._GetConnection()
       ok = False
       try:
         # Copy memcache policies.  Note that get() will never use
@@ -929,7 +928,7 @@
           raise
         except Exception:
           t, e, tb = sys.exc_info()
-          yield tconn.async_rollback(options)  # TODO: Don't block???
+          tconn.async_rollback(options)  # Fire and forget.
           if issubclass(t, datastore_errors.Rollback):
             # TODO: Raise value using tasklets.get_return_value(t)?
             return
@@ -943,7 +942,8 @@
             raise tasklets.Return(result)
             # The finally clause will run the on-commit queue.
       finally:
-        datastore._SetConnection(old_ds_conn)
+        datastore._SetConnection(tctx._old_ds_conn)
+        del tctx._old_ds_conn
         if ok:
           # Call the callbacks collected in the transaction context's
           # on-commit queue.  If the transaction failed the queue is
diff --git a/google/appengine/ext/ndb/model.py b/google/appengine/ext/ndb/model.py
index 5fe25d9..28897d1 100644
--- a/google/appengine/ext/ndb/model.py
+++ b/google/appengine/ext/ndb/model.py
@@ -120,10 +120,10 @@
 - verbose_name=<value>: A human readable name for this property.  This
   human readable name can be used for html form labels.
 
-The repeated, required and default options are mutually exclusive: a
+The repeated and required/default options are mutually exclusive: a
 repeated property cannot be required nor can it specify a default
 value (the default is always an empty list and an empty list is always
-an allowed value), and a required property cannot have a default.
+an allowed value), but a required property can have a default.
 
 Some property types have additional arguments.  Some property types
 do not support all options.
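
In practice, the relaxed rule means required=True can now be combined with a default, while repeated stays exclusive with both, matching the ValueError rewritten below. A sketch with a hypothetical model:

from google.appengine.ext import ndb

class Account(ndb.Model):  # hypothetical model
  # Now legal: the default satisfies the required check when no
  # explicit value has been set on the entity.
  plan = ndb.StringProperty(required=True, default='free')

# Still rejected, per the check below:
#   ndb.StringProperty(repeated=True, required=True)
# raises ValueError('repeated is incompatible with required or default').
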
@@ -286,17 +286,17 @@
 import logging
 import zlib
 
+from .google_imports import datastore
 from .google_imports import datastore_errors
-from .google_imports import datastore_types
-from .google_imports import users
 from .google_imports import datastore_query
 from .google_imports import datastore_rpc
+from .google_imports import datastore_types
+from .google_imports import users
 from .google_imports import entity_pb
 
+from . import key as key_module  # NOTE: 'key' is a common local variable name.
 from . import utils
 
-# NOTE: 'key' is a common local variable name.
-from . import key as key_module
 Key = key_module.Key  # For export.
 
 # NOTE: Property and Error classes are added later.
@@ -783,10 +783,8 @@
       self._default = default
     if verbose_name is not None:
       self._verbose_name = verbose_name
-    if (bool(self._repeated) +
-        bool(self._required) +
-        (self._default is not None)) > 1:
-      raise ValueError('repeated, required and default are mutally exclusive.')
+    if self._repeated and (self._required or self._default is not None):
+      raise ValueError('repeated is incompatible with required or default')
     if choices is not None:
       if not isinstance(choices, (list, tuple, set, frozenset)):
         raise TypeError('choices must be a list, tuple or set; received %r' %
@@ -1251,8 +1249,9 @@
 
     This returns False if a value is stored but it is None.
     """
-    return not self._required or (self._has_value(entity) and
-                                  self._get_value(entity) is not None)
+    return (not self._required or
+            ((self._has_value(entity) or self._default is not None) and
+             self._get_value(entity) is not None))
 
   def __get__(self, entity, unused_cls=None):
     """Descriptor protocol: get the value from the entity."""
@@ -3669,11 +3668,14 @@
         if ctx is None:
           raise datastore_errors.BadRequestError(
             'Context without non-transactional ancestor')
+      save_ds_conn = datastore._GetConnection()
       try:
+        datastore._SetConnection(save_ctx._old_ds_conn)
         tasklets.set_context(ctx)
         return func(*args, **kwds)
       finally:
         tasklets.set_context(save_ctx)
+        datastore._SetConnection(save_ds_conn)
     return inner_non_transactional_wrapper
   return outer_non_transactional_wrapper
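
The connection juggling above is what lets ndb.non_transactional behave correctly when called from inside an NDB transaction: the pre-transaction datastore connection stashed on the transaction context (tctx._old_ds_conn, set in the context.py hunk earlier in this patch) is reinstated for the duration of the wrapped call. A hedged usage sketch with a hypothetical model:

from google.appengine.ext import ndb

class Audit(ndb.Model):  # hypothetical model
  note = ndb.StringProperty()

@ndb.non_transactional
def record(note):
  # Runs against the restored non-transactional connection and
  # commits immediately, independent of the caller's transaction.
  Audit(note=note).put()

@ndb.transactional
def transfer(key):
  ent = key.get()
  record('touched %s' % key.id())  # survives even if transfer rolls back
  ent.put()
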
 
diff --git a/google/appengine/ext/ndb/query.py b/google/appengine/ext/ndb/query.py
index 7dcaf7e..11e29c1 100644
--- a/google/appengine/ext/ndb/query.py
+++ b/google/appengine/ext/ndb/query.py
@@ -1351,7 +1351,7 @@
     raise tasklets.Return(results, cursor, it.probably_has_next())
 
   def _make_options(self, q_options):
-    """Helper to construct a QueryOptions object from keyword arguents.
+    """Helper to construct a QueryOptions object from keyword arguments.
 
     Args:
       q_options: a dict of keyword arguments.
@@ -1366,7 +1366,7 @@
     Returns:
       A QueryOptions object, or None if q_options is empty.
     """
-    if not q_options:
+    if not (q_options or self.__projection):
       return self.default_options
     if 'options' in q_options:
       # Move 'options' to 'config' since that is what QueryOptions() uses.
@@ -1619,7 +1619,6 @@
     This is normally called by Query.iter() or Query.__iter__().
     """
     ctx = tasklets.get_context()
-    callback = None
     options = query._make_options(q_options)
     callback = self._extended_callback
     self._iter = ctx.iter_query(query,
diff --git a/google/appengine/ext/remote_api/remote_api_services.py b/google/appengine/ext/remote_api/remote_api_services.py
index 6a658f7..d7c1ce6 100644
--- a/google/appengine/ext/remote_api/remote_api_services.py
+++ b/google/appengine/ext/remote_api/remote_api_services.py
@@ -59,6 +59,9 @@
         'GetServiceAccountName': (
             app_identity_service_pb.GetServiceAccountNameRequest,
             app_identity_service_pb.GetServiceAccountNameResponse),
+        'GetDefaultGcsBucketName': (
+            app_identity_service_pb.GetDefaultGcsBucketNameRequest,
+            app_identity_service_pb.GetDefaultGcsBucketNameResponse),
         'GetAccessToken': (app_identity_service_pb.GetAccessTokenRequest,
                            app_identity_service_pb.GetAccessTokenResponse),
     },
diff --git a/google/appengine/ext/testbed/__init__.py b/google/appengine/ext/testbed/__init__.py
index 152174f..0bfe429 100644
--- a/google/appengine/ext/testbed/__init__.py
+++ b/google/appengine/ext/testbed/__init__.py
@@ -144,9 +144,10 @@
     'APPLICATION_ID': 'testbed-test',
     'AUTH_DOMAIN': 'gmail.com',
     'HTTP_HOST': 'testbed.example.com',
+    'CURRENT_MODULE_ID': 'default',
     'CURRENT_VERSION_ID': 'testbed-version',
     'REQUEST_ID_HASH': 'testbed-request-id-hash',
-    'REQUEST_LOG_ID': 'testbed-request-log-id',
+    'REQUEST_LOG_ID': '7357B3D7091D',
     'SERVER_NAME': 'testbed.example.com',
     'SERVER_SOFTWARE': 'Development/1.0 (testbed)',
     'SERVER_PORT': '80',
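
With CURRENT_MODULE_ID now part of the default testbed environment, module-aware code can be exercised in unit tests without injecting the variable by hand. A sketch, assuming the Modules API reads the value from the request environment:

import unittest

from google.appengine.api import modules
from google.appengine.ext import testbed

class CurrentModuleTest(unittest.TestCase):
  def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()  # installs the default env shown above

  def tearDown(self):
    self.testbed.deactivate()

  def test_default_module(self):
    self.assertEqual('default', modules.get_current_module_name())
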
diff --git a/google/appengine/tools/api_server.py b/google/appengine/tools/api_server.py
index e7ede28..0f20950 100644
--- a/google/appengine/tools/api_server.py
+++ b/google/appengine/tools/api_server.py
@@ -253,7 +253,8 @@
     taskqueue_task_retry_seconds,
     taskqueue_default_http_server,
     user_login_url,
-    user_logout_url):
+    user_logout_url,
+    default_gcs_bucket_name):
   """Configures the APIs hosted by this server.
 
   Args:
@@ -304,6 +305,7 @@
     user_login_url: A str containing the url that should be used for user login.
     user_logout_url: A str containing the url that should be used for user
         logout.
+    default_gcs_bucket_name: A str overriding the usual default bucket name.
   """
 
 
@@ -314,9 +316,11 @@
 
 
 
+  tmp_app_identity_stub = app_identity_stub.AppIdentityServiceStub()
+  if default_gcs_bucket_name is not None:
+    tmp_app_identity_stub.SetDefaultGcsBucketName(default_gcs_bucket_name)
   apiproxy_stub_map.apiproxy.RegisterStub(
-      'app_identity_service',
-      app_identity_stub.AppIdentityServiceStub())
+      'app_identity_service', tmp_app_identity_stub)
 
   blob_storage = file_blob_storage.FileBlobStorage(blobstore_path, app_id)
   apiproxy_stub_map.apiproxy.RegisterStub(
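
A standalone sketch of the registration pattern above; the bucket value is illustrative, and SetDefaultGcsBucketName is the setter the hunk itself calls:

    from google.appengine.api import apiproxy_stub_map
    from google.appengine.api.app_identity import app_identity_stub

    stub = app_identity_stub.AppIdentityServiceStub()
    default_gcs_bucket_name = 'my-test-bucket'  # illustrative flag value
    if default_gcs_bucket_name is not None:
      stub.SetDefaultGcsBucketName(default_gcs_bucket_name)
    apiproxy_stub_map.apiproxy.RegisterStub('app_identity_service', stub)
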
@@ -579,7 +583,8 @@
                smtp_user=None,
                task_retry_seconds=None,
                trusted=None,
-               use_sqlite=None):
+               use_sqlite=None,
+               default_gcs_bucket_name=None):
     """Configures the APIs hosted by this server.
 
     Args:
@@ -638,6 +643,7 @@
       trusted: A bool indicating if privileged APIs should be made available.
       use_sqlite: A bool indicating whether DatastoreSqliteStub or
           DatastoreFileStub should be used.
+      default_gcs_bucket_name: A str overriding the usual default bucket name.
     """
     self._process = None
     self._host = host
@@ -671,6 +677,7 @@
     self._BindArgument('--task_retry_seconds', task_retry_seconds)
     self._BindArgument('--trusted', trusted)
     self._BindArgument('--use_sqlite', use_sqlite)
+    self._BindArgument('--default_gcs_bucket_name', default_gcs_bucket_name)
 
   @property
   def url(self):
@@ -836,6 +843,9 @@
   else:
     application_address = None
 
+  if not hasattr(args, 'default_gcs_bucket_name'):
+    args.default_gcs_bucket_name = None
+
   request_info._local_dispatcher = ApiServerDispatcher()
   _SetupStubs(app_id=args.application,
               application_root=args.application_root,
@@ -859,7 +869,8 @@
               taskqueue_task_retry_seconds=args.task_retry_seconds,
               taskqueue_default_http_server=application_address,
               user_login_url=args.user_login_url,
-              user_logout_url=args.user_logout_url)
+              user_logout_url=args.user_logout_url,
+              default_gcs_bucket_name=args.default_gcs_bucket_name)
   server = APIServer((args.api_host, args.api_port), args.application)
   try:
     server.serve_forever()
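
The hasattr guard added earlier in this file lets callers that build the args namespace without the new flag keep working. The same pattern in isolation, with an illustrative namespace:

    import argparse

    args = argparse.Namespace(application='example-app')  # flag not set
    if not hasattr(args, 'default_gcs_bucket_name'):
      args.default_gcs_bucket_name = None  # fall back to the stub's default
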
diff --git a/google/appengine/tools/appcfg.py b/google/appengine/tools/appcfg.py
index 5be2107..313b868 100644
--- a/google/appengine/tools/appcfg.py
+++ b/google/appengine/tools/appcfg.py
@@ -81,6 +81,11 @@
 LIST_DELIMITER = '\n'
 TUPLE_DELIMITER = '|'
 BACKENDS_ACTION = 'backends'
+BACKENDS_MESSAGE = ('Looks like you\'re using Backends. We suggest that you '
+                    'make the switch to App Engine Modules. See the Modules '
+                    'documentation to learn more about converting: '
+                    'https://developers.google.com/appengine/docs/python/'
+                    'modules/converting')
 
 
 MAX_LOG_LEVEL = 4
@@ -2317,6 +2322,8 @@
 
 
     if action == BACKENDS_ACTION:
+
+      StatusUpdate(BACKENDS_MESSAGE)
       if len(self.args) < 1:
         RaiseParseError(action, self.actions[BACKENDS_ACTION])
 
@@ -2598,8 +2605,9 @@
                 self.oauth_scopes,
                 self.options.oauth2_credential_file)
 
-      appengine_rpc_httplib2.tools.FLAGS.auth_local_webserver = (
-          self.options.auth_local_webserver)
+      if hasattr(appengine_rpc_httplib2.tools, 'FLAGS'):
+        appengine_rpc_httplib2.tools.FLAGS.auth_local_webserver = (
+            self.options.auth_local_webserver)
     else:
       if not self.rpc_server_class:
         self.rpc_server_class = appengine_rpc.HttpRpcServerWithOAuth2Suggestion
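
The hasattr check tolerates oauth2client releases whose tools module does not define a gflags-based FLAGS object (the library was migrating from gflags to argparse around this time). A minimal sketch of the guarded assignment, with the attribute names taken from the hunk:

    from oauth2client import tools

    # Some oauth2client releases do not expose tools.FLAGS, so only touch
    # it when the installed version provides it.
    if hasattr(tools, 'FLAGS'):
      tools.FLAGS.auth_local_webserver = False
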
@@ -3297,6 +3305,22 @@
                               payload=backends_yaml.ToYAML())
     print >> self.out_fh, response
 
+  def ListVersions(self):
+    """Lists all versions for an app."""
+    if self.args:
+      self.parser.error('Expected no arguments.')
+
+    appyaml = self._ParseAppInfoFromYaml(self.basepath)
+    rpcserver = self._GetRpcServer()
+    response = rpcserver.Send('/api/versions/list', app_id=appyaml.application)
+
+    parsed_response = yaml.safe_load(response)
+    if not parsed_response:
+      print >> self.out_fh, ('No versions uploaded for app: %s.' %
+                             appyaml.application)
+    else:
+      print >> self.out_fh, response
+
   def _ParseAndValidateModuleYamls(self, yaml_paths):
     """Validates given yaml paths and returns the parsed yaml objects.
 
@@ -4232,7 +4256,13 @@
 The 'resource_limits_info' command prints the current resource limits that
 are enforced."""),
 
-
+      'list_versions': Action(
+          function='ListVersions',
+          usage='%prog [options] list_versions <directory>',
+          short_desc='List all uploaded versions for an app.',
+          long_desc="""
+The 'list_versions' command outputs the uploaded versions for each module of
+an application in YAML."""),
   }
 
 
diff --git a/google/appengine/tools/dev-channel-js.js b/google/appengine/tools/dev-channel-js.js
index 1c1186f..66c6ab8 100644
--- a/google/appengine/tools/dev-channel-js.js
+++ b/google/appengine/tools/dev-channel-js.js
@@ -205,6 +205,9 @@
 goog.getUid = function(obj) {
   return obj[goog.UID_PROPERTY_] || (obj[goog.UID_PROPERTY_] = ++goog.uidCounter_)
 };
+goog.hasUid = function(obj) {
+  return!!obj[goog.UID_PROPERTY_]
+};
 goog.removeUid = function(obj) {
   "removeAttribute" in obj && obj.removeAttribute(goog.UID_PROPERTY_);
   try {
@@ -793,6 +796,11 @@
   !goog.asserts.ENABLE_ASSERTS || value instanceof type || goog.asserts.doAssertFailure_("instanceof check failed.", null, opt_message, Array.prototype.slice.call(arguments, 3));
   return value
 };
+goog.asserts.assertObjectPrototypeIsIntact = function() {
+  for(var key in Object.prototype) {
+    goog.asserts.fail(key + " should not be enumerable in Object.prototype.")
+  }
+};
 goog.array = {};
 goog.NATIVE_ARRAY_PROTOTYPES = goog.TRUSTED_SITE;
 goog.array.peek = function(array) {
@@ -1158,6 +1166,12 @@
   array.length && (n %= array.length, 0 < n ? goog.array.ARRAY_PROTOTYPE_.unshift.apply(array, array.splice(-n, n)) : 0 > n && goog.array.ARRAY_PROTOTYPE_.push.apply(array, array.splice(0, -n)));
   return array
 };
+goog.array.moveItem = function(arr, fromIndex, toIndex) {
+  goog.asserts.assert(0 <= fromIndex && fromIndex < arr.length);
+  goog.asserts.assert(0 <= toIndex && toIndex < arr.length);
+  var removedItems = goog.array.ARRAY_PROTOTYPE_.splice.call(arr, fromIndex, 1);
+  goog.array.ARRAY_PROTOTYPE_.splice.call(arr, toIndex, 0, removedItems[0])
+};
 goog.array.zip = function(var_args) {
   if(!arguments.length) {
     return[]
@@ -1599,11 +1613,11 @@
   var ua;
   if(!goog.userAgent.BROWSER_KNOWN_ && (ua = goog.userAgent.getUserAgentString())) {
     var navigator = goog.userAgent.getNavigator();
-    goog.userAgent.detectedOpera_ = 0 == ua.indexOf("Opera");
-    goog.userAgent.detectedIe_ = !goog.userAgent.detectedOpera_ && -1 != ua.indexOf("MSIE");
-    goog.userAgent.detectedWebkit_ = !goog.userAgent.detectedOpera_ && -1 != ua.indexOf("WebKit");
-    goog.userAgent.detectedMobile_ = goog.userAgent.detectedWebkit_ && -1 != ua.indexOf("Mobile");
-    goog.userAgent.detectedGecko_ = !goog.userAgent.detectedOpera_ && !goog.userAgent.detectedWebkit_ && "Gecko" == navigator.product
+    goog.userAgent.detectedOpera_ = goog.string.startsWith(ua, "Opera");
+    goog.userAgent.detectedIe_ = !goog.userAgent.detectedOpera_ && (goog.string.contains(ua, "MSIE") || goog.string.contains(ua, "Trident"));
+    goog.userAgent.detectedWebkit_ = !goog.userAgent.detectedOpera_ && goog.string.contains(ua, "WebKit");
+    goog.userAgent.detectedMobile_ = goog.userAgent.detectedWebkit_ && goog.string.contains(ua, "Mobile");
+    goog.userAgent.detectedGecko_ = !goog.userAgent.detectedOpera_ && !goog.userAgent.detectedWebkit_ && !goog.userAgent.detectedIe_ && "Gecko" == navigator.product
   }
 };
 goog.userAgent.BROWSER_KNOWN_ || goog.userAgent.init_();
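
An illustrative Python port of the updated detection logic above (an aid to reading the minified JS, not SDK code): IE11 removed "MSIE" from its user-agent string, so "Trident" must also be checked, and Gecko detection now excludes IE explicitly.

    def detect_browser(ua, navigator_product=''):
      # Mirrors the updated goog.userAgent.init_ logic in the hunk above.
      is_opera = ua.startswith('Opera')
      is_ie = not is_opera and ('MSIE' in ua or 'Trident' in ua)
      is_webkit = not is_opera and 'WebKit' in ua
      is_mobile = is_webkit and 'Mobile' in ua
      is_gecko = (not is_opera and not is_webkit and not is_ie and
                  navigator_product == 'Gecko')
      return dict(opera=is_opera, ie=is_ie, webkit=is_webkit,
                  mobile=is_mobile, gecko=is_gecko)

    # IE11-style UA: detected through 'Trident' even though 'MSIE' is gone.
    ua11 = 'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko'
    print detect_browser(ua11)['ie']  # True
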
@@ -1632,9 +1646,9 @@
   goog.userAgent.detectedLinux_ = goog.string.contains(goog.userAgent.PLATFORM, "Linux");
   goog.userAgent.detectedX11_ = !!goog.userAgent.getNavigator() && goog.string.contains(goog.userAgent.getNavigator().appVersion || "", "X11");
   var ua = goog.userAgent.getUserAgentString();
-  goog.userAgent.detectedAndroid_ = !!ua && 0 <= ua.indexOf("Android");
-  goog.userAgent.detectedIPhone_ = !!ua && 0 <= ua.indexOf("iPhone");
-  goog.userAgent.detectedIPad_ = !!ua && 0 <= ua.indexOf("iPad")
+  goog.userAgent.detectedAndroid_ = !!ua && goog.string.contains(ua, "Android");
+  goog.userAgent.detectedIPhone_ = !!ua && goog.string.contains(ua, "iPhone");
+  goog.userAgent.detectedIPad_ = !!ua && goog.string.contains(ua, "iPad")
 };
 goog.userAgent.PLATFORM_KNOWN_ || goog.userAgent.initPlatform_();
 goog.userAgent.MAC = goog.userAgent.PLATFORM_KNOWN_ ? goog.userAgent.ASSUME_MAC : goog.userAgent.detectedMac_;
@@ -1649,7 +1663,7 @@
   if(goog.userAgent.OPERA && goog.global.opera) {
     var operaVersion = goog.global.opera.version, version = "function" == typeof operaVersion ? operaVersion() : operaVersion
   }else {
-    if(goog.userAgent.GECKO ? re = /rv\:([^\);]+)(\)|;)/ : goog.userAgent.IE ? re = /MSIE\s+([^\);]+)(\)|;)/ : goog.userAgent.WEBKIT && (re = /WebKit\/(\S+)/), re) {
+    if(goog.userAgent.GECKO ? re = /rv\:([^\);]+)(\)|;)/ : goog.userAgent.IE ? re = /\b(?:MSIE|rv)\s+([^\);]+)(\)|;)/ : goog.userAgent.WEBKIT && (re = /WebKit\/(\S+)/), re) {
       var arr = re.exec(goog.userAgent.getUserAgentString()), version = arr ? arr[1] : ""
     }
   }
@@ -2496,6 +2510,808 @@
   }
   monitors.length--
 };
+goog.disposable = {};
+goog.disposable.IDisposable = function() {
+};
+goog.Disposable = function() {
+  goog.Disposable.MONITORING_MODE != goog.Disposable.MonitoringMode.OFF && (goog.Disposable.instances_[goog.getUid(this)] = this)
+};
+goog.Disposable.MonitoringMode = {OFF:0, PERMANENT:1, INTERACTIVE:2};
+goog.Disposable.MONITORING_MODE = 0;
+goog.Disposable.INCLUDE_STACK_ON_CREATION = !0;
+goog.Disposable.instances_ = {};
+goog.Disposable.getUndisposedObjects = function() {
+  var ret = [], id;
+  for(id in goog.Disposable.instances_) {
+    goog.Disposable.instances_.hasOwnProperty(id) && ret.push(goog.Disposable.instances_[Number(id)])
+  }
+  return ret
+};
+goog.Disposable.clearUndisposedObjects = function() {
+  goog.Disposable.instances_ = {}
+};
+goog.Disposable.prototype.disposed_ = !1;
+goog.Disposable.prototype.isDisposed = function() {
+  return this.disposed_
+};
+goog.Disposable.prototype.dispose = function() {
+  if(!this.disposed_ && (this.disposed_ = !0, this.disposeInternal(), goog.Disposable.MONITORING_MODE != goog.Disposable.MonitoringMode.OFF)) {
+    var uid = goog.getUid(this);
+    if(goog.Disposable.MONITORING_MODE == goog.Disposable.MonitoringMode.PERMANENT && !goog.Disposable.instances_.hasOwnProperty(uid)) {
+      throw Error(this + " did not call the goog.Disposable base constructor or was disposed of after a clearUndisposedObjects call");
+    }
+    delete goog.Disposable.instances_[uid]
+  }
+};
+goog.Disposable.prototype.disposeInternal = function() {
+  if(this.onDisposeCallbacks_) {
+    for(;this.onDisposeCallbacks_.length;) {
+      this.onDisposeCallbacks_.shift()()
+    }
+  }
+};
+goog.Disposable.isDisposed = function(obj) {
+  return obj && "function" == typeof obj.isDisposed ? obj.isDisposed() : !1
+};
+goog.dispose = function(obj) {
+  obj && "function" == typeof obj.dispose && obj.dispose()
+};
+goog.disposeAll = function(var_args) {
+  for(var i = 0, len = arguments.length;i < len;++i) {
+    var disposable = arguments[i];
+    goog.isArrayLike(disposable) ? goog.disposeAll.apply(null, disposable) : goog.dispose(disposable)
+  }
+};
+goog.events = {};
+goog.events.Event = function(type, opt_target) {
+  this.type = type;
+  this.currentTarget = this.target = opt_target
+};
+goog.events.Event.prototype.disposeInternal = function() {
+};
+goog.events.Event.prototype.dispose = function() {
+};
+goog.events.Event.prototype.propagationStopped_ = !1;
+goog.events.Event.prototype.defaultPrevented = !1;
+goog.events.Event.prototype.returnValue_ = !0;
+goog.events.Event.prototype.stopPropagation = function() {
+  this.propagationStopped_ = !0
+};
+goog.events.Event.prototype.preventDefault = function() {
+  this.defaultPrevented = !0;
+  this.returnValue_ = !1
+};
+goog.events.Event.stopPropagation = function(e) {
+  e.stopPropagation()
+};
+goog.events.Event.preventDefault = function(e) {
+  e.preventDefault()
+};
+goog.reflect = {};
+goog.reflect.object = function(type, object) {
+  return object
+};
+goog.reflect.sinkValue = function(x) {
+  goog.reflect.sinkValue[" "](x);
+  return x
+};
+goog.reflect.sinkValue[" "] = goog.nullFunction;
+goog.reflect.canAccessProperty = function(obj, prop) {
+  try {
+    return goog.reflect.sinkValue(obj[prop]), !0
+  }catch(e) {
+  }
+  return!1
+};
+goog.events.BrowserFeature = {HAS_W3C_BUTTON:!goog.userAgent.IE || goog.userAgent.isDocumentModeOrHigher(9), HAS_W3C_EVENT_SUPPORT:!goog.userAgent.IE || goog.userAgent.isDocumentModeOrHigher(9), SET_KEY_CODE_TO_PREVENT_DEFAULT:goog.userAgent.IE && !goog.userAgent.isVersionOrHigher("9"), HAS_NAVIGATOR_ONLINE_PROPERTY:!goog.userAgent.WEBKIT || goog.userAgent.isVersionOrHigher("528"), HAS_HTML5_NETWORK_EVENT_SUPPORT:goog.userAgent.GECKO && goog.userAgent.isVersionOrHigher("1.9b") || goog.userAgent.IE && 
+goog.userAgent.isVersionOrHigher("8") || goog.userAgent.OPERA && goog.userAgent.isVersionOrHigher("9.5") || goog.userAgent.WEBKIT && goog.userAgent.isVersionOrHigher("528"), HTML5_NETWORK_EVENTS_FIRE_ON_BODY:goog.userAgent.GECKO && !goog.userAgent.isVersionOrHigher("8") || goog.userAgent.IE && !goog.userAgent.isVersionOrHigher("9"), TOUCH_ENABLED:"ontouchstart" in goog.global || !!(goog.global.document && document.documentElement && "ontouchstart" in document.documentElement) || !(!goog.global.navigator || 
+!goog.global.navigator.msMaxTouchPoints)};
+goog.events.EventType = {CLICK:"click", DBLCLICK:"dblclick", MOUSEDOWN:"mousedown", MOUSEUP:"mouseup", MOUSEOVER:"mouseover", MOUSEOUT:"mouseout", MOUSEMOVE:"mousemove", SELECTSTART:"selectstart", KEYPRESS:"keypress", KEYDOWN:"keydown", KEYUP:"keyup", BLUR:"blur", FOCUS:"focus", DEACTIVATE:"deactivate", FOCUSIN:goog.userAgent.IE ? "focusin" : "DOMFocusIn", FOCUSOUT:goog.userAgent.IE ? "focusout" : "DOMFocusOut", CHANGE:"change", SELECT:"select", SUBMIT:"submit", INPUT:"input", PROPERTYCHANGE:"propertychange", 
+DRAGSTART:"dragstart", DRAG:"drag", DRAGENTER:"dragenter", DRAGOVER:"dragover", DRAGLEAVE:"dragleave", DROP:"drop", DRAGEND:"dragend", TOUCHSTART:"touchstart", TOUCHMOVE:"touchmove", TOUCHEND:"touchend", TOUCHCANCEL:"touchcancel", BEFOREUNLOAD:"beforeunload", CONSOLEMESSAGE:"consolemessage", CONTEXTMENU:"contextmenu", DOMCONTENTLOADED:"DOMContentLoaded", ERROR:"error", HELP:"help", LOAD:"load", LOSECAPTURE:"losecapture", READYSTATECHANGE:"readystatechange", RESIZE:"resize", SCROLL:"scroll", UNLOAD:"unload", 
+HASHCHANGE:"hashchange", PAGEHIDE:"pagehide", PAGESHOW:"pageshow", POPSTATE:"popstate", COPY:"copy", PASTE:"paste", CUT:"cut", BEFORECOPY:"beforecopy", BEFORECUT:"beforecut", BEFOREPASTE:"beforepaste", ONLINE:"online", OFFLINE:"offline", MESSAGE:"message", CONNECT:"connect", TRANSITIONEND:goog.userAgent.WEBKIT ? "webkitTransitionEnd" : goog.userAgent.OPERA ? "oTransitionEnd" : "transitionend", MSGESTURECHANGE:"MSGestureChange", MSGESTUREEND:"MSGestureEnd", MSGESTUREHOLD:"MSGestureHold", MSGESTURESTART:"MSGestureStart", 
+MSGESTURETAP:"MSGestureTap", MSGOTPOINTERCAPTURE:"MSGotPointerCapture", MSINERTIASTART:"MSInertiaStart", MSLOSTPOINTERCAPTURE:"MSLostPointerCapture", MSPOINTERCANCEL:"MSPointerCancel", MSPOINTERDOWN:"MSPointerDown", MSPOINTERMOVE:"MSPointerMove", MSPOINTEROVER:"MSPointerOver", MSPOINTEROUT:"MSPointerOut", MSPOINTERUP:"MSPointerUp", TEXTINPUT:"textinput", COMPOSITIONSTART:"compositionstart", COMPOSITIONUPDATE:"compositionupdate", COMPOSITIONEND:"compositionend", EXIT:"exit", LOADABORT:"loadabort", 
+LOADCOMMIT:"loadcommit", LOADREDIRECT:"loadredirect", LOADSTART:"loadstart", LOADSTOP:"loadstop", RESPONSIVE:"responsive", SIZECHANGED:"sizechanged", UNRESPONSIVE:"unresponsive"};
+goog.events.BrowserEvent = function(opt_e, opt_currentTarget) {
+  opt_e && this.init(opt_e, opt_currentTarget)
+};
+goog.inherits(goog.events.BrowserEvent, goog.events.Event);
+goog.events.BrowserEvent.MouseButton = {LEFT:0, MIDDLE:1, RIGHT:2};
+goog.events.BrowserEvent.IEButtonMap = [1, 4, 2];
+goog.events.BrowserEvent.prototype.target = null;
+goog.events.BrowserEvent.prototype.relatedTarget = null;
+goog.events.BrowserEvent.prototype.offsetX = 0;
+goog.events.BrowserEvent.prototype.offsetY = 0;
+goog.events.BrowserEvent.prototype.clientX = 0;
+goog.events.BrowserEvent.prototype.clientY = 0;
+goog.events.BrowserEvent.prototype.screenX = 0;
+goog.events.BrowserEvent.prototype.screenY = 0;
+goog.events.BrowserEvent.prototype.button = 0;
+goog.events.BrowserEvent.prototype.keyCode = 0;
+goog.events.BrowserEvent.prototype.charCode = 0;
+goog.events.BrowserEvent.prototype.ctrlKey = !1;
+goog.events.BrowserEvent.prototype.altKey = !1;
+goog.events.BrowserEvent.prototype.shiftKey = !1;
+goog.events.BrowserEvent.prototype.metaKey = !1;
+goog.events.BrowserEvent.prototype.event_ = null;
+goog.events.BrowserEvent.prototype.init = function(e, opt_currentTarget) {
+  var type = this.type = e.type;
+  goog.events.Event.call(this, type);
+  this.target = e.target || e.srcElement;
+  this.currentTarget = opt_currentTarget;
+  var relatedTarget = e.relatedTarget;
+  relatedTarget ? goog.userAgent.GECKO && (goog.reflect.canAccessProperty(relatedTarget, "nodeName") || (relatedTarget = null)) : type == goog.events.EventType.MOUSEOVER ? relatedTarget = e.fromElement : type == goog.events.EventType.MOUSEOUT && (relatedTarget = e.toElement);
+  this.relatedTarget = relatedTarget;
+  this.offsetX = goog.userAgent.WEBKIT || void 0 !== e.offsetX ? e.offsetX : e.layerX;
+  this.offsetY = goog.userAgent.WEBKIT || void 0 !== e.offsetY ? e.offsetY : e.layerY;
+  this.clientX = void 0 !== e.clientX ? e.clientX : e.pageX;
+  this.clientY = void 0 !== e.clientY ? e.clientY : e.pageY;
+  this.screenX = e.screenX || 0;
+  this.screenY = e.screenY || 0;
+  this.button = e.button;
+  this.keyCode = e.keyCode || 0;
+  this.charCode = e.charCode || ("keypress" == type ? e.keyCode : 0);
+  this.ctrlKey = e.ctrlKey;
+  this.altKey = e.altKey;
+  this.shiftKey = e.shiftKey;
+  this.metaKey = e.metaKey;
+  this.state = e.state;
+  this.event_ = e;
+  e.defaultPrevented && this.preventDefault();
+  delete this.propagationStopped_
+};
+goog.events.BrowserEvent.prototype.stopPropagation = function() {
+  goog.events.BrowserEvent.superClass_.stopPropagation.call(this);
+  this.event_.stopPropagation ? this.event_.stopPropagation() : this.event_.cancelBubble = !0
+};
+goog.events.BrowserEvent.prototype.preventDefault = function() {
+  goog.events.BrowserEvent.superClass_.preventDefault.call(this);
+  var be = this.event_;
+  if(be.preventDefault) {
+    be.preventDefault()
+  }else {
+    if(be.returnValue = !1, goog.events.BrowserFeature.SET_KEY_CODE_TO_PREVENT_DEFAULT) {
+      try {
+        if(be.ctrlKey || 112 <= be.keyCode && 123 >= be.keyCode) {
+          be.keyCode = -1
+        }
+      }catch(ex) {
+      }
+    }
+  }
+};
+goog.events.BrowserEvent.prototype.disposeInternal = function() {
+};
+goog.events.Listenable = function() {
+};
+goog.events.Listenable.IMPLEMENTED_BY_PROP = "closure_listenable_" + (1E6 * Math.random() | 0);
+goog.events.Listenable.addImplementation = function(cls) {
+  cls.prototype[goog.events.Listenable.IMPLEMENTED_BY_PROP] = !0
+};
+goog.events.Listenable.isImplementedBy = function(obj) {
+  return!(!obj || !obj[goog.events.Listenable.IMPLEMENTED_BY_PROP])
+};
+goog.events.ListenableKey = function() {
+};
+goog.events.ListenableKey.counter_ = 0;
+goog.events.ListenableKey.reserveKey = function() {
+  return++goog.events.ListenableKey.counter_
+};
+goog.events.Listener = function(listener, proxy, src, type, capture, opt_handler) {
+  this.listener = listener;
+  this.proxy = proxy;
+  this.src = src;
+  this.type = type;
+  this.capture = !!capture;
+  this.handler = opt_handler;
+  this.key = goog.events.ListenableKey.reserveKey();
+  this.removed = this.callOnce = !1
+};
+goog.events.Listener.ENABLE_MONITORING = !1;
+goog.events.Listener.prototype.markAsRemoved = function() {
+  this.removed = !0;
+  this.handler = this.src = this.proxy = this.listener = null
+};
+goog.events.listeners_ = {};
+goog.events.listenerTree_ = {};
+goog.events.sources_ = {};
+goog.events.onString_ = "on";
+goog.events.onStringMap_ = {};
+goog.events.listen = function(src, type, listener, opt_capt, opt_handler) {
+  if(goog.isArray(type)) {
+    for(var i = 0;i < type.length;i++) {
+      goog.events.listen(src, type[i], listener, opt_capt, opt_handler)
+    }
+    return null
+  }
+  listener = goog.events.wrapListener_(listener);
+  return goog.events.Listenable.isImplementedBy(src) ? src.listen(type, listener, opt_capt, opt_handler) : goog.events.listen_(src, type, listener, !1, opt_capt, opt_handler)
+};
+goog.events.listen_ = function(src, type, listener, callOnce, opt_capt, opt_handler) {
+  if(!type) {
+    throw Error("Invalid event type");
+  }
+  var capture = !!opt_capt, map = goog.events.listenerTree_;
+  type in map || (map[type] = {count_:0});
+  map = map[type];
+  capture in map || (map[capture] = {count_:0}, map.count_++);
+  var map = map[capture], srcUid = goog.getUid(src), listenerArray, listenerObj;
+  if(map[srcUid]) {
+    listenerArray = map[srcUid];
+    for(var i = 0;i < listenerArray.length;i++) {
+      if(listenerObj = listenerArray[i], listenerObj.listener == listener && listenerObj.handler == opt_handler) {
+        if(listenerObj.removed) {
+          break
+        }
+        callOnce || (listenerArray[i].callOnce = !1);
+        return listenerArray[i]
+      }
+    }
+  }else {
+    listenerArray = map[srcUid] = [], map.count_++
+  }
+  var proxy = goog.events.getProxy();
+  listenerObj = new goog.events.Listener(listener, proxy, src, type, capture, opt_handler);
+  listenerObj.callOnce = callOnce;
+  proxy.src = src;
+  proxy.listener = listenerObj;
+  listenerArray.push(listenerObj);
+  goog.events.sources_[srcUid] || (goog.events.sources_[srcUid] = []);
+  goog.events.sources_[srcUid].push(listenerObj);
+  src.addEventListener ? src.addEventListener(type, proxy, capture) : src.attachEvent(goog.events.getOnString_(type), proxy);
+  return goog.events.listeners_[listenerObj.key] = listenerObj
+};
+goog.events.getProxy = function() {
+  var proxyCallbackFunction = goog.events.handleBrowserEvent_, f = goog.events.BrowserFeature.HAS_W3C_EVENT_SUPPORT ? function(eventObject) {
+    return proxyCallbackFunction.call(f.src, f.listener, eventObject)
+  } : function(eventObject) {
+    var v = proxyCallbackFunction.call(f.src, f.listener, eventObject);
+    if(!v) {
+      return v
+    }
+  };
+  return f
+};
+goog.events.listenOnce = function(src, type, listener, opt_capt, opt_handler) {
+  if(goog.isArray(type)) {
+    for(var i = 0;i < type.length;i++) {
+      goog.events.listenOnce(src, type[i], listener, opt_capt, opt_handler)
+    }
+    return null
+  }
+  listener = goog.events.wrapListener_(listener);
+  return goog.events.Listenable.isImplementedBy(src) ? src.listenOnce(type, listener, opt_capt, opt_handler) : goog.events.listen_(src, type, listener, !0, opt_capt, opt_handler)
+};
+goog.events.listenWithWrapper = function(src, wrapper, listener, opt_capt, opt_handler) {
+  wrapper.listen(src, listener, opt_capt, opt_handler)
+};
+goog.events.unlisten = function(src, type, listener, opt_capt, opt_handler) {
+  if(goog.isArray(type)) {
+    for(var i = 0;i < type.length;i++) {
+      goog.events.unlisten(src, type[i], listener, opt_capt, opt_handler)
+    }
+    return null
+  }
+  listener = goog.events.wrapListener_(listener);
+  if(goog.events.Listenable.isImplementedBy(src)) {
+    return src.unlisten(type, listener, opt_capt, opt_handler)
+  }
+  var capture = !!opt_capt, listenerArray = goog.events.getListeners_(src, type, capture);
+  if(!listenerArray) {
+    return!1
+  }
+  for(i = 0;i < listenerArray.length;i++) {
+    if(listenerArray[i].listener == listener && listenerArray[i].capture == capture && listenerArray[i].handler == opt_handler) {
+      return goog.events.unlistenByKey(listenerArray[i])
+    }
+  }
+  return!1
+};
+goog.events.unlistenByKey = function(key) {
+  if(goog.isNumber(key)) {
+    return!1
+  }
+  var listener = key;
+  if(!listener || listener.removed) {
+    return!1
+  }
+  var src = listener.src;
+  if(goog.events.Listenable.isImplementedBy(src)) {
+    return src.unlistenByKey(listener)
+  }
+  var type = listener.type, proxy = listener.proxy, capture = listener.capture;
+  src.removeEventListener ? src.removeEventListener(type, proxy, capture) : src.detachEvent && src.detachEvent(goog.events.getOnString_(type), proxy);
+  var srcUid = goog.getUid(src);
+  if(goog.events.sources_[srcUid]) {
+    var sourcesArray = goog.events.sources_[srcUid];
+    goog.array.remove(sourcesArray, listener);
+    0 == sourcesArray.length && delete goog.events.sources_[srcUid]
+  }
+  listener.markAsRemoved();
+  var listenerArray = goog.events.listenerTree_[type][capture][srcUid];
+  listenerArray && (goog.array.remove(listenerArray, listener), 0 == listenerArray.length && (delete goog.events.listenerTree_[type][capture][srcUid], goog.events.listenerTree_[type][capture].count_--), 0 == goog.events.listenerTree_[type][capture].count_ && (delete goog.events.listenerTree_[type][capture], goog.events.listenerTree_[type].count_--), 0 == goog.events.listenerTree_[type].count_ && delete goog.events.listenerTree_[type]);
+  delete goog.events.listeners_[listener.key];
+  return!0
+};
+goog.events.unlistenWithWrapper = function(src, wrapper, listener, opt_capt, opt_handler) {
+  wrapper.unlisten(src, listener, opt_capt, opt_handler)
+};
+goog.events.removeAll = function(opt_obj, opt_type) {
+  if(opt_obj) {
+    if(goog.events.Listenable.isImplementedBy(opt_obj)) {
+      return opt_obj.removeAllListeners(opt_type)
+    }
+    var count = 0, noType = null == opt_type, srcUid = goog.getUid(opt_obj);
+    if(goog.events.sources_[srcUid]) {
+      for(var sourcesArray = goog.events.sources_[srcUid], i = sourcesArray.length - 1;0 <= i;i--) {
+        var listener = sourcesArray[i];
+        if(noType || opt_type == listener.type) {
+          goog.events.unlistenByKey(listener), count++
+        }
+      }
+    }
+    return count
+  }
+  return goog.events.removeAllNativeListeners()
+};
+goog.events.removeAllNativeListeners = function() {
+  var count = 0;
+  goog.object.forEach(goog.events.listeners_, function(listener) {
+    goog.events.unlistenByKey(listener);
+    count++
+  });
+  return count
+};
+goog.events.getListeners = function(obj, type, capture) {
+  return goog.events.Listenable.isImplementedBy(obj) ? obj.getListeners(type, capture) : goog.events.getListeners_(obj, type, capture) || []
+};
+goog.events.getListeners_ = function(obj, type, capture) {
+  var map = goog.events.listenerTree_;
+  if(type in map && (map = map[type], capture in map)) {
+    var map = map[capture], objUid = goog.getUid(obj);
+    if(map[objUid]) {
+      return map[objUid]
+    }
+  }
+  return null
+};
+goog.events.getListener = function(src, type, listener, opt_capt, opt_handler) {
+  var capture = !!opt_capt;
+  listener = goog.events.wrapListener_(listener);
+  if(goog.events.Listenable.isImplementedBy(src)) {
+    return src.getListener(type, listener, capture, opt_handler)
+  }
+  var listenerArray = goog.events.getListeners_(src, type, capture);
+  if(listenerArray) {
+    for(var i = 0;i < listenerArray.length;i++) {
+      if(!listenerArray[i].removed && listenerArray[i].listener == listener && listenerArray[i].capture == capture && listenerArray[i].handler == opt_handler) {
+        return listenerArray[i]
+      }
+    }
+  }
+  return null
+};
+goog.events.hasListener = function(obj, opt_type, opt_capture) {
+  if(goog.events.Listenable.isImplementedBy(obj)) {
+    return obj.hasListener(opt_type, opt_capture)
+  }
+  var objUid = goog.getUid(obj), listeners = goog.events.sources_[objUid];
+  if(listeners) {
+    var hasType = goog.isDef(opt_type), hasCapture = goog.isDef(opt_capture);
+    if(hasType && hasCapture) {
+      var map = goog.events.listenerTree_[opt_type];
+      return!!map && !!map[opt_capture] && objUid in map[opt_capture]
+    }
+    return hasType || hasCapture ? goog.array.some(listeners, function(listener) {
+      return hasType && listener.type == opt_type || hasCapture && listener.capture == opt_capture
+    }) : !0
+  }
+  return!1
+};
+goog.events.expose = function(e) {
+  var str = [], key;
+  for(key in e) {
+    e[key] && e[key].id ? str.push(key + " = " + e[key] + " (" + e[key].id + ")") : str.push(key + " = " + e[key])
+  }
+  return str.join("\n")
+};
+goog.events.getOnString_ = function(type) {
+  return type in goog.events.onStringMap_ ? goog.events.onStringMap_[type] : goog.events.onStringMap_[type] = goog.events.onString_ + type
+};
+goog.events.fireListeners = function(obj, type, capture, eventObject) {
+  if(goog.events.Listenable.isImplementedBy(obj)) {
+    return obj.fireListeners(type, capture, eventObject)
+  }
+  var map = goog.events.listenerTree_;
+  return type in map && (map = map[type], capture in map) ? goog.events.fireListeners_(map[capture], obj, type, capture, eventObject) : !0
+};
+goog.events.fireListeners_ = function(map, obj, type, capture, eventObject) {
+  var retval = 1, objUid = goog.getUid(obj);
+  if(map[objUid]) {
+    for(var listenerArray = goog.array.clone(map[objUid]), i = 0;i < listenerArray.length;i++) {
+      var listener = listenerArray[i];
+      listener && !listener.removed && (retval &= !1 !== goog.events.fireListener(listener, eventObject))
+    }
+  }
+  return Boolean(retval)
+};
+goog.events.fireListener = function(listener, eventObject) {
+  var listenerFn = listener.listener, listenerHandler = listener.handler || listener.src;
+  listener.callOnce && goog.events.unlistenByKey(listener);
+  return listenerFn.call(listenerHandler, eventObject)
+};
+goog.events.getTotalListenerCount = function() {
+  return goog.object.getCount(goog.events.listeners_)
+};
+goog.events.dispatchEvent = function(src, e) {
+  goog.asserts.assert(goog.events.Listenable.isImplementedBy(src), "Can not use goog.events.dispatchEvent with non-goog.events.Listenable instance.");
+  return src.dispatchEvent(e)
+};
+goog.events.protectBrowserEventEntryPoint = function(errorHandler) {
+  goog.events.handleBrowserEvent_ = errorHandler.protectEntryPoint(goog.events.handleBrowserEvent_)
+};
+goog.events.handleBrowserEvent_ = function(listener, opt_evt) {
+  if(listener.removed) {
+    return!0
+  }
+  var type = listener.type, map = goog.events.listenerTree_;
+  if(!(type in map)) {
+    return!0
+  }
+  var map = map[type], retval, targetsMap;
+  if(!goog.events.BrowserFeature.HAS_W3C_EVENT_SUPPORT) {
+    var ieEvent = opt_evt || goog.getObjectByName("window.event"), hasCapture = !0 in map, hasBubble = !1 in map;
+    if(hasCapture) {
+      if(goog.events.isMarkedIeEvent_(ieEvent)) {
+        return!0
+      }
+      goog.events.markIeEvent_(ieEvent)
+    }
+    var evt = new goog.events.BrowserEvent(ieEvent, this);
+    retval = !0;
+    try {
+      if(hasCapture) {
+        for(var ancestors = [], parent = evt.currentTarget;parent;parent = parent.parentNode) {
+          ancestors.push(parent)
+        }
+        targetsMap = map[!0];
+        for(var i = ancestors.length - 1;!evt.propagationStopped_ && 0 <= i;i--) {
+          evt.currentTarget = ancestors[i], retval &= goog.events.fireListeners_(targetsMap, ancestors[i], type, !0, evt)
+        }
+        if(hasBubble) {
+          for(targetsMap = map[!1], i = 0;!evt.propagationStopped_ && i < ancestors.length;i++) {
+            evt.currentTarget = ancestors[i], retval &= goog.events.fireListeners_(targetsMap, ancestors[i], type, !1, evt)
+          }
+        }
+      }else {
+        retval = goog.events.fireListener(listener, evt)
+      }
+    }finally {
+      ancestors && (ancestors.length = 0)
+    }
+    return retval
+  }
+  return goog.events.fireListener(listener, new goog.events.BrowserEvent(opt_evt, this))
+};
+goog.events.markIeEvent_ = function(e) {
+  var useReturnValue = !1;
+  if(0 == e.keyCode) {
+    try {
+      e.keyCode = -1;
+      return
+    }catch(ex) {
+      useReturnValue = !0
+    }
+  }
+  if(useReturnValue || void 0 == e.returnValue) {
+    e.returnValue = !0
+  }
+};
+goog.events.isMarkedIeEvent_ = function(e) {
+  return 0 > e.keyCode || void 0 != e.returnValue
+};
+goog.events.uniqueIdCounter_ = 0;
+goog.events.getUniqueId = function(identifier) {
+  return identifier + "_" + goog.events.uniqueIdCounter_++
+};
+goog.events.LISTENER_WRAPPER_PROP_ = "__closure_events_fn_" + (1E9 * Math.random() >>> 0);
+goog.events.wrapListener_ = function(listener) {
+  goog.asserts.assert(listener, "Listener can not be null.");
+  if(goog.isFunction(listener)) {
+    return listener
+  }
+  goog.asserts.assert(listener.handleEvent, "An object listener must have handleEvent method.");
+  return listener[goog.events.LISTENER_WRAPPER_PROP_] || (listener[goog.events.LISTENER_WRAPPER_PROP_] = function(e) {
+    return listener.handleEvent(e)
+  })
+};
+goog.debug.entryPointRegistry.register(function(transformer) {
+  goog.events.handleBrowserEvent_ = transformer(goog.events.handleBrowserEvent_)
+});
+goog.events.ListenerMap = function(src) {
+  this.src = src;
+  this.listeners = {}
+};
+goog.events.ListenerMap.prototype.add = function(type, listener, callOnce, opt_useCapture, opt_listenerScope) {
+  var listenerArray = this.listeners[type];
+  listenerArray || (listenerArray = this.listeners[type] = []);
+  var listenerObj, index = goog.events.ListenerMap.findListenerIndex_(listenerArray, listener, opt_useCapture, opt_listenerScope);
+  -1 < index ? (listenerObj = listenerArray[index], callOnce || (listenerObj.callOnce = !1)) : (listenerObj = new goog.events.Listener(listener, null, this.src, type, !!opt_useCapture, opt_listenerScope), listenerObj.callOnce = callOnce, listenerArray.push(listenerObj));
+  return listenerObj
+};
+goog.events.ListenerMap.prototype.remove = function(type, listener, opt_useCapture, opt_listenerScope) {
+  if(!(type in this.listeners)) {
+    return!1
+  }
+  var listenerArray = this.listeners[type], index = goog.events.ListenerMap.findListenerIndex_(listenerArray, listener, opt_useCapture, opt_listenerScope);
+  if(-1 < index) {
+    var listenerObj = listenerArray[index];
+    listenerObj.markAsRemoved();
+    goog.array.removeAt(listenerArray, index);
+    0 == listenerArray.length && delete this.listeners[type];
+    return!0
+  }
+  return!1
+};
+goog.events.ListenerMap.prototype.removeByKey = function(listener) {
+  var type = listener.type;
+  if(!(type in this.listeners)) {
+    return!1
+  }
+  var removed = goog.array.remove(this.listeners[type], listener);
+  removed && (listener.markAsRemoved(), 0 == this.listeners[type].length && delete this.listeners[type]);
+  return removed
+};
+goog.events.ListenerMap.prototype.removeAll = function(opt_type) {
+  var count = 0, type;
+  for(type in this.listeners) {
+    if(!opt_type || type == opt_type) {
+      for(var listenerArray = this.listeners[type], i = 0;i < listenerArray.length;i++) {
+        ++count, listenerArray[i].markAsRemoved()
+      }
+      delete this.listeners[type]
+    }
+  }
+  return count
+};
+goog.events.ListenerMap.prototype.getListeners = function(type, capture) {
+  var listenerArray = this.listeners[type], rv = [];
+  if(listenerArray) {
+    for(var i = 0;i < listenerArray.length;++i) {
+      var listenerObj = listenerArray[i];
+      listenerObj.capture == capture && rv.push(listenerObj)
+    }
+  }
+  return rv
+};
+goog.events.ListenerMap.prototype.getListener = function(type, listener, capture, opt_listenerScope) {
+  var listenerArray = this.listeners[type], i = -1;
+  listenerArray && (i = goog.events.ListenerMap.findListenerIndex_(listenerArray, listener, capture, opt_listenerScope));
+  return-1 < i ? listenerArray[i] : null
+};
+goog.events.ListenerMap.prototype.hasListener = function(opt_type, opt_capture) {
+  var hasType = goog.isDef(opt_type), hasCapture = goog.isDef(opt_capture);
+  return goog.object.some(this.listeners, function(listenerArray) {
+    for(var i = 0;i < listenerArray.length;++i) {
+      if(!(hasType && listenerArray[i].type != opt_type || hasCapture && listenerArray[i].capture != opt_capture)) {
+        return!0
+      }
+    }
+    return!1
+  })
+};
+goog.events.ListenerMap.findListenerIndex_ = function(listenerArray, listener, opt_useCapture, opt_listenerScope) {
+  for(var i = 0;i < listenerArray.length;++i) {
+    var listenerObj = listenerArray[i];
+    if(!listenerObj.removed && listenerObj.listener == listener && listenerObj.capture == !!opt_useCapture && listenerObj.handler == opt_listenerScope) {
+      return i
+    }
+  }
+  return-1
+};
+goog.events.EventTarget = function() {
+  goog.Disposable.call(this);
+  this.eventTargetListeners_ = new goog.events.ListenerMap(this);
+  this.actualEventTarget_ = this
+};
+goog.inherits(goog.events.EventTarget, goog.Disposable);
+goog.events.Listenable.addImplementation(goog.events.EventTarget);
+goog.events.EventTarget.MAX_ANCESTORS_ = 1E3;
+goog.events.EventTarget.prototype.parentEventTarget_ = null;
+goog.events.EventTarget.prototype.getParentEventTarget = function() {
+  return this.parentEventTarget_
+};
+goog.events.EventTarget.prototype.addEventListener = function(type, handler, opt_capture, opt_handlerScope) {
+  goog.events.listen(this, type, handler, opt_capture, opt_handlerScope)
+};
+goog.events.EventTarget.prototype.removeEventListener = function(type, handler, opt_capture, opt_handlerScope) {
+  goog.events.unlisten(this, type, handler, opt_capture, opt_handlerScope)
+};
+goog.events.EventTarget.prototype.dispatchEvent = function(e) {
+  this.assertInitialized_();
+  var ancestorsTree, ancestor = this.getParentEventTarget();
+  if(ancestor) {
+    ancestorsTree = [];
+    for(var ancestorCount = 1;ancestor;ancestor = ancestor.getParentEventTarget()) {
+      ancestorsTree.push(ancestor), goog.asserts.assert(++ancestorCount < goog.events.EventTarget.MAX_ANCESTORS_, "infinite loop")
+    }
+  }
+  return goog.events.EventTarget.dispatchEventInternal_(this.actualEventTarget_, e, ancestorsTree)
+};
+goog.events.EventTarget.prototype.disposeInternal = function() {
+  goog.events.EventTarget.superClass_.disposeInternal.call(this);
+  this.removeAllListeners();
+  this.parentEventTarget_ = null
+};
+goog.events.EventTarget.prototype.listen = function(type, listener, opt_useCapture, opt_listenerScope) {
+  this.assertInitialized_();
+  return this.eventTargetListeners_.add(type, listener, !1, opt_useCapture, opt_listenerScope)
+};
+goog.events.EventTarget.prototype.listenOnce = function(type, listener, opt_useCapture, opt_listenerScope) {
+  return this.eventTargetListeners_.add(type, listener, !0, opt_useCapture, opt_listenerScope)
+};
+goog.events.EventTarget.prototype.unlisten = function(type, listener, opt_useCapture, opt_listenerScope) {
+  return this.eventTargetListeners_.remove(type, listener, opt_useCapture, opt_listenerScope)
+};
+goog.events.EventTarget.prototype.unlistenByKey = function(key) {
+  return this.eventTargetListeners_.removeByKey(key)
+};
+goog.events.EventTarget.prototype.removeAllListeners = function(opt_type) {
+  return this.eventTargetListeners_ ? this.eventTargetListeners_.removeAll(opt_type) : 0
+};
+goog.events.EventTarget.prototype.fireListeners = function(type, capture, eventObject) {
+  var listenerArray = this.eventTargetListeners_.listeners[type];
+  if(!listenerArray) {
+    return!0
+  }
+  for(var listenerArray = goog.array.clone(listenerArray), rv = !0, i = 0;i < listenerArray.length;++i) {
+    var listener = listenerArray[i];
+    if(listener && !listener.removed && listener.capture == capture) {
+      var listenerFn = listener.listener, listenerHandler = listener.handler || listener.src;
+      listener.callOnce && this.unlistenByKey(listener);
+      rv = !1 !== listenerFn.call(listenerHandler, eventObject) && rv
+    }
+  }
+  return rv && !1 != eventObject.returnValue_
+};
+goog.events.EventTarget.prototype.getListeners = function(type, capture) {
+  return this.eventTargetListeners_.getListeners(type, capture)
+};
+goog.events.EventTarget.prototype.getListener = function(type, listener, capture, opt_listenerScope) {
+  return this.eventTargetListeners_.getListener(type, listener, capture, opt_listenerScope)
+};
+goog.events.EventTarget.prototype.hasListener = function(opt_type, opt_capture) {
+  return this.eventTargetListeners_.hasListener(opt_type, opt_capture)
+};
+goog.events.EventTarget.prototype.assertInitialized_ = function() {
+  goog.asserts.assert(this.eventTargetListeners_, "Event target is not initialized. Did you call the superclass (goog.events.EventTarget) constructor?")
+};
+goog.events.EventTarget.dispatchEventInternal_ = function(target, e, opt_ancestorsTree) {
+  var type = e.type || e;
+  if(goog.isString(e)) {
+    e = new goog.events.Event(e, target)
+  }else {
+    if(e instanceof goog.events.Event) {
+      e.target = e.target || target
+    }else {
+      var oldEvent = e;
+      e = new goog.events.Event(type, target);
+      goog.object.extend(e, oldEvent)
+    }
+  }
+  var rv = !0, currentTarget;
+  if(opt_ancestorsTree) {
+    for(var i = opt_ancestorsTree.length - 1;!e.propagationStopped_ && 0 <= i;i--) {
+      currentTarget = e.currentTarget = opt_ancestorsTree[i], rv = currentTarget.fireListeners(type, !0, e) && rv
+    }
+  }
+  e.propagationStopped_ || (currentTarget = e.currentTarget = target, rv = currentTarget.fireListeners(type, !0, e) && rv, e.propagationStopped_ || (rv = currentTarget.fireListeners(type, !1, e) && rv));
+  if(opt_ancestorsTree) {
+    for(i = 0;!e.propagationStopped_ && i < opt_ancestorsTree.length;i++) {
+      currentTarget = e.currentTarget = opt_ancestorsTree[i], rv = currentTarget.fireListeners(type, !1, e) && rv
+    }
+  }
+  return rv
+};
+goog.json = {};
+goog.json.isValid_ = function(s) {
+  if(/^\s*$/.test(s)) {
+    return!1
+  }
+  var backslashesRe = /\\["\\\/bfnrtu]/g, simpleValuesRe = /"[^"\\\n\r\u2028\u2029\x00-\x08\x0a-\x1f]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, openBracketsRe = /(?:^|:|,)(?:[\s\u2028\u2029]*\[)+/g, remainderRe = /^[\],:{}\s\u2028\u2029]*$/;
+  return remainderRe.test(s.replace(backslashesRe, "@").replace(simpleValuesRe, "]").replace(openBracketsRe, ""))
+};
+goog.json.parse = function(s) {
+  var o = String(s);
+  if(goog.json.isValid_(o)) {
+    try {
+      return eval("(" + o + ")")
+    }catch(ex) {
+    }
+  }
+  throw Error("Invalid JSON string: " + o);
+};
+goog.json.unsafeParse = function(s) {
+  return eval("(" + s + ")")
+};
+goog.json.serialize = function(object, opt_replacer) {
+  return(new goog.json.Serializer(opt_replacer)).serialize(object)
+};
+goog.json.Serializer = function(opt_replacer) {
+  this.replacer_ = opt_replacer
+};
+goog.json.Serializer.prototype.serialize = function(object) {
+  var sb = [];
+  this.serialize_(object, sb);
+  return sb.join("")
+};
+goog.json.Serializer.prototype.serialize_ = function(object, sb) {
+  switch(typeof object) {
+    case "string":
+      this.serializeString_(object, sb);
+      break;
+    case "number":
+      this.serializeNumber_(object, sb);
+      break;
+    case "boolean":
+      sb.push(object);
+      break;
+    case "undefined":
+      sb.push("null");
+      break;
+    case "object":
+      if(null == object) {
+        sb.push("null");
+        break
+      }
+      if(goog.isArray(object)) {
+        this.serializeArray(object, sb);
+        break
+      }
+      this.serializeObject_(object, sb);
+      break;
+    case "function":
+      break;
+    default:
+      throw Error("Unknown type: " + typeof object);
+  }
+};
+goog.json.Serializer.charToJsonCharCache_ = {'"':'\\"', "\\":"\\\\", "/":"\\/", "\b":"\\b", "\f":"\\f", "\n":"\\n", "\r":"\\r", "\t":"\\t", "\x0B":"\\u000b"};
+goog.json.Serializer.charsToReplace_ = /\uffff/.test("\uffff") ? /[\\\"\x00-\x1f\x7f-\uffff]/g : /[\\\"\x00-\x1f\x7f-\xff]/g;
+goog.json.Serializer.prototype.serializeString_ = function(s, sb) {
+  sb.push('"', s.replace(goog.json.Serializer.charsToReplace_, function(c) {
+    if(c in goog.json.Serializer.charToJsonCharCache_) {
+      return goog.json.Serializer.charToJsonCharCache_[c]
+    }
+    var cc = c.charCodeAt(0), rv = "\\u";
+    16 > cc ? rv += "000" : 256 > cc ? rv += "00" : 4096 > cc && (rv += "0");
+    return goog.json.Serializer.charToJsonCharCache_[c] = rv + cc.toString(16)
+  }), '"')
+};
+goog.json.Serializer.prototype.serializeNumber_ = function(n, sb) {
+  sb.push(isFinite(n) && !isNaN(n) ? n : "null")
+};
+goog.json.Serializer.prototype.serializeArray = function(arr, sb) {
+  var l = arr.length;
+  sb.push("[");
+  for(var sep = "", i = 0;i < l;i++) {
+    sb.push(sep);
+    var value = arr[i];
+    this.serialize_(this.replacer_ ? this.replacer_.call(arr, String(i), value) : value, sb);
+    sep = ","
+  }
+  sb.push("]")
+};
+goog.json.Serializer.prototype.serializeObject_ = function(obj, sb) {
+  sb.push("{");
+  var sep = "", key;
+  for(key in obj) {
+    if(Object.prototype.hasOwnProperty.call(obj, key)) {
+      var value = obj[key];
+      "function" != typeof value && (sb.push(sep), this.serializeString_(key, sb), sb.push(":"), this.serialize_(this.replacer_ ? this.replacer_.call(obj, key, value) : value, sb), sep = ",")
+    }
+  }
+  sb.push("}")
+};
 goog.structs = {};
 goog.structs.Collection = function() {
 };
@@ -3218,7 +4034,7 @@
   }catch(e$$0) {
     fileName = "Not available", threwError = !0
   }
-  return!threwError && err.lineNumber && err.fileName && err.stack ? err : {message:err.message, name:err.name, lineNumber:lineNumber, fileName:fileName, stack:err.stack || "Not available"}
+  return!threwError && err.lineNumber && err.fileName && err.stack && err.message && err.name ? err : {message:err.message || "Not available", name:err.name || "UnknownError", lineNumber:lineNumber, fileName:fileName, stack:err.stack || "Not available"}
 };
 goog.debug.enhanceError = function(err, opt_message) {
   var error = "string" == typeof err ? Error(err) : err;
@@ -3444,6 +4260,16 @@
 goog.debug.Logger.prototype.getName = function() {
   return this.name_
 };
+goog.debug.Logger.prototype.addHandler = function(handler) {
+  goog.debug.LOGGING_ENABLED && (goog.debug.Logger.ENABLE_HIERARCHY ? (this.handlers_ || (this.handlers_ = []), this.handlers_.push(handler)) : (goog.asserts.assert(!this.name_, "Cannot call addHandler on a non-root logger when goog.debug.Logger.ENABLE_HIERARCHY is false."), goog.debug.Logger.rootHandlers_.push(handler)))
+};
+goog.debug.Logger.prototype.removeHandler = function(handler) {
+  if(goog.debug.LOGGING_ENABLED) {
+    var handlers = goog.debug.Logger.ENABLE_HIERARCHY ? this.handlers_ : goog.debug.Logger.rootHandlers_;
+    return!!handlers && goog.array.remove(handlers, handler)
+  }
+  return!1
+};
 goog.debug.Logger.prototype.getParent = function() {
   return this.parent_
 };
@@ -3551,816 +4377,39 @@
   }
   return goog.debug.LogManager.loggers_[name] = logger
 };
-goog.disposable = {};
-goog.disposable.IDisposable = function() {
-};
-goog.Disposable = function() {
-  goog.Disposable.MONITORING_MODE != goog.Disposable.MonitoringMode.OFF && (goog.Disposable.instances_[goog.getUid(this)] = this)
-};
-goog.Disposable.MonitoringMode = {OFF:0, PERMANENT:1, INTERACTIVE:2};
-goog.Disposable.MONITORING_MODE = 0;
-goog.Disposable.instances_ = {};
-goog.Disposable.getUndisposedObjects = function() {
-  var ret = [], id;
-  for(id in goog.Disposable.instances_) {
-    goog.Disposable.instances_.hasOwnProperty(id) && ret.push(goog.Disposable.instances_[Number(id)])
-  }
-  return ret
-};
-goog.Disposable.clearUndisposedObjects = function() {
-  goog.Disposable.instances_ = {}
-};
-goog.Disposable.prototype.disposed_ = !1;
-goog.Disposable.prototype.isDisposed = function() {
-  return this.disposed_
-};
-goog.Disposable.prototype.dispose = function() {
-  if(!this.disposed_ && (this.disposed_ = !0, this.disposeInternal(), goog.Disposable.MONITORING_MODE != goog.Disposable.MonitoringMode.OFF)) {
-    var uid = goog.getUid(this);
-    if(goog.Disposable.MONITORING_MODE == goog.Disposable.MonitoringMode.PERMANENT && !goog.Disposable.instances_.hasOwnProperty(uid)) {
-      throw Error(this + " did not call the goog.Disposable base constructor or was disposed of after a clearUndisposedObjects call");
-    }
-    delete goog.Disposable.instances_[uid]
-  }
-};
-goog.Disposable.prototype.disposeInternal = function() {
-  if(this.onDisposeCallbacks_) {
-    for(;this.onDisposeCallbacks_.length;) {
-      this.onDisposeCallbacks_.shift()()
-    }
-  }
-};
-goog.Disposable.isDisposed = function(obj) {
-  return obj && "function" == typeof obj.isDisposed ? obj.isDisposed() : !1
-};
-goog.dispose = function(obj) {
-  obj && "function" == typeof obj.dispose && obj.dispose()
-};
-goog.disposeAll = function(var_args) {
-  for(var i = 0, len = arguments.length;i < len;++i) {
-    var disposable = arguments[i];
-    goog.isArrayLike(disposable) ? goog.disposeAll.apply(null, disposable) : goog.dispose(disposable)
-  }
-};
-goog.events = {};
-goog.events.Event = function(type, opt_target) {
-  this.type = type;
-  this.currentTarget = this.target = opt_target
-};
-goog.events.Event.prototype.disposeInternal = function() {
-};
-goog.events.Event.prototype.dispose = function() {
-};
-goog.events.Event.prototype.propagationStopped_ = !1;
-goog.events.Event.prototype.defaultPrevented = !1;
-goog.events.Event.prototype.returnValue_ = !0;
-goog.events.Event.prototype.stopPropagation = function() {
-  this.propagationStopped_ = !0
-};
-goog.events.Event.prototype.preventDefault = function() {
-  this.defaultPrevented = !0;
-  this.returnValue_ = !1
-};
-goog.events.Event.stopPropagation = function(e) {
-  e.stopPropagation()
-};
-goog.events.Event.preventDefault = function(e) {
-  e.preventDefault()
-};
-goog.reflect = {};
-goog.reflect.object = function(type, object) {
-  return object
-};
-goog.reflect.sinkValue = function(x) {
-  goog.reflect.sinkValue[" "](x);
-  return x
-};
-goog.reflect.sinkValue[" "] = goog.nullFunction;
-goog.reflect.canAccessProperty = function(obj, prop) {
-  try {
-    return goog.reflect.sinkValue(obj[prop]), !0
-  }catch(e) {
-  }
-  return!1
-};
-goog.events.BrowserFeature = {HAS_W3C_BUTTON:!goog.userAgent.IE || goog.userAgent.isDocumentModeOrHigher(9), HAS_W3C_EVENT_SUPPORT:!goog.userAgent.IE || goog.userAgent.isDocumentModeOrHigher(9), SET_KEY_CODE_TO_PREVENT_DEFAULT:goog.userAgent.IE && !goog.userAgent.isVersionOrHigher("9"), HAS_NAVIGATOR_ONLINE_PROPERTY:!goog.userAgent.WEBKIT || goog.userAgent.isVersionOrHigher("528"), HAS_HTML5_NETWORK_EVENT_SUPPORT:goog.userAgent.GECKO && goog.userAgent.isVersionOrHigher("1.9b") || goog.userAgent.IE && 
-goog.userAgent.isVersionOrHigher("8") || goog.userAgent.OPERA && goog.userAgent.isVersionOrHigher("9.5") || goog.userAgent.WEBKIT && goog.userAgent.isVersionOrHigher("528"), HTML5_NETWORK_EVENTS_FIRE_ON_BODY:goog.userAgent.GECKO && !goog.userAgent.isVersionOrHigher("8") || goog.userAgent.IE && !goog.userAgent.isVersionOrHigher("9"), TOUCH_ENABLED:"ontouchstart" in goog.global || !!(goog.global.document && document.documentElement && "ontouchstart" in document.documentElement) || !(!goog.global.navigator || 
-!goog.global.navigator.msMaxTouchPoints)};
-goog.events.EventType = {CLICK:"click", DBLCLICK:"dblclick", MOUSEDOWN:"mousedown", MOUSEUP:"mouseup", MOUSEOVER:"mouseover", MOUSEOUT:"mouseout", MOUSEMOVE:"mousemove", SELECTSTART:"selectstart", KEYPRESS:"keypress", KEYDOWN:"keydown", KEYUP:"keyup", BLUR:"blur", FOCUS:"focus", DEACTIVATE:"deactivate", FOCUSIN:goog.userAgent.IE ? "focusin" : "DOMFocusIn", FOCUSOUT:goog.userAgent.IE ? "focusout" : "DOMFocusOut", CHANGE:"change", SELECT:"select", SUBMIT:"submit", INPUT:"input", PROPERTYCHANGE:"propertychange", 
-DRAGSTART:"dragstart", DRAG:"drag", DRAGENTER:"dragenter", DRAGOVER:"dragover", DRAGLEAVE:"dragleave", DROP:"drop", DRAGEND:"dragend", TOUCHSTART:"touchstart", TOUCHMOVE:"touchmove", TOUCHEND:"touchend", TOUCHCANCEL:"touchcancel", BEFOREUNLOAD:"beforeunload", CONTEXTMENU:"contextmenu", DOMCONTENTLOADED:"DOMContentLoaded", ERROR:"error", HELP:"help", LOAD:"load", LOSECAPTURE:"losecapture", READYSTATECHANGE:"readystatechange", RESIZE:"resize", SCROLL:"scroll", UNLOAD:"unload", HASHCHANGE:"hashchange", 
-PAGEHIDE:"pagehide", PAGESHOW:"pageshow", POPSTATE:"popstate", COPY:"copy", PASTE:"paste", CUT:"cut", BEFORECOPY:"beforecopy", BEFORECUT:"beforecut", BEFOREPASTE:"beforepaste", ONLINE:"online", OFFLINE:"offline", MESSAGE:"message", CONNECT:"connect", TRANSITIONEND:goog.userAgent.WEBKIT ? "webkitTransitionEnd" : goog.userAgent.OPERA ? "oTransitionEnd" : "transitionend", MSGESTURECHANGE:"MSGestureChange", MSGESTUREEND:"MSGestureEnd", MSGESTUREHOLD:"MSGestureHold", MSGESTURESTART:"MSGestureStart", MSGESTURETAP:"MSGestureTap", 
-MSGOTPOINTERCAPTURE:"MSGotPointerCapture", MSINERTIASTART:"MSInertiaStart", MSLOSTPOINTERCAPTURE:"MSLostPointerCapture", MSPOINTERCANCEL:"MSPointerCancel", MSPOINTERDOWN:"MSPointerDown", MSPOINTERMOVE:"MSPointerMove", MSPOINTEROVER:"MSPointerOver", MSPOINTEROUT:"MSPointerOut", MSPOINTERUP:"MSPointerUp", TEXTINPUT:"textinput", COMPOSITIONSTART:"compositionstart", COMPOSITIONUPDATE:"compositionupdate", COMPOSITIONEND:"compositionend"};
-goog.events.BrowserEvent = function(opt_e, opt_currentTarget) {
-  opt_e && this.init(opt_e, opt_currentTarget)
-};
-goog.inherits(goog.events.BrowserEvent, goog.events.Event);
-goog.events.BrowserEvent.MouseButton = {LEFT:0, MIDDLE:1, RIGHT:2};
-goog.events.BrowserEvent.IEButtonMap = [1, 4, 2];
-goog.events.BrowserEvent.prototype.target = null;
-goog.events.BrowserEvent.prototype.relatedTarget = null;
-goog.events.BrowserEvent.prototype.offsetX = 0;
-goog.events.BrowserEvent.prototype.offsetY = 0;
-goog.events.BrowserEvent.prototype.clientX = 0;
-goog.events.BrowserEvent.prototype.clientY = 0;
-goog.events.BrowserEvent.prototype.screenX = 0;
-goog.events.BrowserEvent.prototype.screenY = 0;
-goog.events.BrowserEvent.prototype.button = 0;
-goog.events.BrowserEvent.prototype.keyCode = 0;
-goog.events.BrowserEvent.prototype.charCode = 0;
-goog.events.BrowserEvent.prototype.ctrlKey = !1;
-goog.events.BrowserEvent.prototype.altKey = !1;
-goog.events.BrowserEvent.prototype.shiftKey = !1;
-goog.events.BrowserEvent.prototype.metaKey = !1;
-goog.events.BrowserEvent.prototype.event_ = null;
-goog.events.BrowserEvent.prototype.init = function(e, opt_currentTarget) {
-  var type = this.type = e.type;
-  goog.events.Event.call(this, type);
-  this.target = e.target || e.srcElement;
-  this.currentTarget = opt_currentTarget;
-  var relatedTarget = e.relatedTarget;
-  relatedTarget ? goog.userAgent.GECKO && (goog.reflect.canAccessProperty(relatedTarget, "nodeName") || (relatedTarget = null)) : type == goog.events.EventType.MOUSEOVER ? relatedTarget = e.fromElement : type == goog.events.EventType.MOUSEOUT && (relatedTarget = e.toElement);
-  this.relatedTarget = relatedTarget;
-  this.offsetX = goog.userAgent.WEBKIT || void 0 !== e.offsetX ? e.offsetX : e.layerX;
-  this.offsetY = goog.userAgent.WEBKIT || void 0 !== e.offsetY ? e.offsetY : e.layerY;
-  this.clientX = void 0 !== e.clientX ? e.clientX : e.pageX;
-  this.clientY = void 0 !== e.clientY ? e.clientY : e.pageY;
-  this.screenX = e.screenX || 0;
-  this.screenY = e.screenY || 0;
-  this.button = e.button;
-  this.keyCode = e.keyCode || 0;
-  this.charCode = e.charCode || ("keypress" == type ? e.keyCode : 0);
-  this.ctrlKey = e.ctrlKey;
-  this.altKey = e.altKey;
-  this.shiftKey = e.shiftKey;
-  this.metaKey = e.metaKey;
-  this.state = e.state;
-  this.event_ = e;
-  e.defaultPrevented && this.preventDefault();
-  delete this.propagationStopped_
-};
-goog.events.BrowserEvent.prototype.stopPropagation = function() {
-  goog.events.BrowserEvent.superClass_.stopPropagation.call(this);
-  this.event_.stopPropagation ? this.event_.stopPropagation() : this.event_.cancelBubble = !0
-};
-goog.events.BrowserEvent.prototype.preventDefault = function() {
-  goog.events.BrowserEvent.superClass_.preventDefault.call(this);
-  var be = this.event_;
-  if(be.preventDefault) {
-    be.preventDefault()
-  }else {
-    if(be.returnValue = !1, goog.events.BrowserFeature.SET_KEY_CODE_TO_PREVENT_DEFAULT) {
-      try {
-        if(be.ctrlKey || 112 <= be.keyCode && 123 >= be.keyCode) {
-          be.keyCode = -1
-        }
-      }catch(ex) {
-      }
-    }
-  }
-};
-goog.events.BrowserEvent.prototype.disposeInternal = function() {
-};
-goog.events.Listenable = function() {
-};
-goog.events.Listenable.IMPLEMENTED_BY_PROP = "closure_listenable_" + (1E6 * Math.random() | 0);
-goog.events.Listenable.addImplementation = function(cls) {
-  cls.prototype[goog.events.Listenable.IMPLEMENTED_BY_PROP] = !0
-};
-goog.events.Listenable.isImplementedBy = function(obj) {
-  return!(!obj || !obj[goog.events.Listenable.IMPLEMENTED_BY_PROP])
-};
-goog.events.ListenableKey = function() {
-};
-goog.events.ListenableKey.counter_ = 0;
-goog.events.ListenableKey.reserveKey = function() {
-  return++goog.events.ListenableKey.counter_
-};
-goog.events.Listener = function(listener, proxy, src, type, capture, opt_handler) {
-  this.listener = listener;
-  this.proxy = proxy;
-  this.src = src;
-  this.type = type;
-  this.capture = !!capture;
-  this.handler = opt_handler;
-  this.key = goog.events.ListenableKey.reserveKey();
-  this.removed = this.callOnce = !1
-};
-goog.events.Listener.ENABLE_MONITORING = !1;
-goog.events.Listener.prototype.markAsRemoved = function() {
-  this.removed = !0;
-  this.handler = this.src = this.proxy = this.listener = null
-};
-goog.events.listeners_ = {};
-goog.events.listenerTree_ = {};
-goog.events.sources_ = {};
-goog.events.onString_ = "on";
-goog.events.onStringMap_ = {};
-goog.events.keySeparator_ = "_";
-goog.events.listen = function(src, type, listener, opt_capt, opt_handler) {
-  if(goog.isArray(type)) {
-    for(var i = 0;i < type.length;i++) {
-      goog.events.listen(src, type[i], listener, opt_capt, opt_handler)
-    }
-    return null
-  }
-  var listenableKey;
-  listener = goog.events.wrapListener_(listener);
-  return listenableKey = goog.events.Listenable.isImplementedBy(src) ? src.listen(type, listener, opt_capt, opt_handler) : goog.events.listen_(src, type, listener, !1, opt_capt, opt_handler)
-};
-goog.events.listen_ = function(src, type, listener, callOnce, opt_capt, opt_handler) {
-  if(!type) {
-    throw Error("Invalid event type");
-  }
-  var capture = !!opt_capt, map = goog.events.listenerTree_;
-  type in map || (map[type] = {count_:0, remaining_:0});
-  map = map[type];
-  capture in map || (map[capture] = {count_:0, remaining_:0}, map.count_++);
-  var map = map[capture], srcUid = goog.getUid(src), listenerArray, listenerObj;
-  map.remaining_++;
-  if(map[srcUid]) {
-    listenerArray = map[srcUid];
-    for(var i = 0;i < listenerArray.length;i++) {
-      if(listenerObj = listenerArray[i], listenerObj.listener == listener && listenerObj.handler == opt_handler) {
-        if(listenerObj.removed) {
-          break
-        }
-        callOnce || (listenerArray[i].callOnce = !1);
-        return listenerArray[i]
-      }
-    }
-  }else {
-    listenerArray = map[srcUid] = [], map.count_++
-  }
-  var proxy = goog.events.getProxy();
-  listenerObj = new goog.events.Listener(listener, proxy, src, type, capture, opt_handler);
-  listenerObj.callOnce = callOnce;
-  proxy.src = src;
-  proxy.listener = listenerObj;
-  listenerArray.push(listenerObj);
-  goog.events.sources_[srcUid] || (goog.events.sources_[srcUid] = []);
-  goog.events.sources_[srcUid].push(listenerObj);
-  src.addEventListener ? src.addEventListener(type, proxy, capture) : src.attachEvent(goog.events.getOnString_(type), proxy);
-  var key = listenerObj.key;
-  return goog.events.listeners_[key] = listenerObj
-};
-goog.events.getProxy = function() {
-  var proxyCallbackFunction = goog.events.handleBrowserEvent_, f = goog.events.BrowserFeature.HAS_W3C_EVENT_SUPPORT ? function(eventObject) {
-    return proxyCallbackFunction.call(f.src, f.listener, eventObject)
-  } : function(eventObject) {
-    var v = proxyCallbackFunction.call(f.src, f.listener, eventObject);
-    if(!v) {
-      return v
-    }
-  };
-  return f
-};
-goog.events.listenOnce = function(src, type, listener, opt_capt, opt_handler) {
-  if(goog.isArray(type)) {
-    for(var i = 0;i < type.length;i++) {
-      goog.events.listenOnce(src, type[i], listener, opt_capt, opt_handler)
-    }
-    return null
-  }
-  var listenableKey;
-  listener = goog.events.wrapListener_(listener);
-  return listenableKey = goog.events.Listenable.isImplementedBy(src) ? src.listenOnce(type, listener, opt_capt, opt_handler) : goog.events.listen_(src, type, listener, !0, opt_capt, opt_handler)
-};
-goog.events.listenWithWrapper = function(src, wrapper, listener, opt_capt, opt_handler) {
-  wrapper.listen(src, listener, opt_capt, opt_handler)
-};
-goog.events.unlisten = function(src, type, listener, opt_capt, opt_handler) {
-  if(goog.isArray(type)) {
-    for(var i = 0;i < type.length;i++) {
-      goog.events.unlisten(src, type[i], listener, opt_capt, opt_handler)
-    }
-    return null
-  }
-  listener = goog.events.wrapListener_(listener);
-  if(goog.events.Listenable.isImplementedBy(src)) {
-    return src.unlisten(type, listener, opt_capt, opt_handler)
-  }
-  var capture = !!opt_capt, listenerArray = goog.events.getListeners_(src, type, capture);
-  if(!listenerArray) {
-    return!1
-  }
-  for(i = 0;i < listenerArray.length;i++) {
-    if(listenerArray[i].listener == listener && listenerArray[i].capture == capture && listenerArray[i].handler == opt_handler) {
-      return goog.events.unlistenByKey(listenerArray[i])
-    }
-  }
-  return!1
-};
-goog.events.unlistenByKey = function(key) {
-  if(goog.isNumber(key)) {
-    return!1
-  }
-  var listener = key;
-  if(!listener || listener.removed) {
-    return!1
-  }
-  var src = listener.src;
-  if(goog.events.Listenable.isImplementedBy(src)) {
-    return src.unlistenByKey(listener)
-  }
-  var type = listener.type, proxy = listener.proxy, capture = listener.capture;
-  src.removeEventListener ? src.removeEventListener(type, proxy, capture) : src.detachEvent && src.detachEvent(goog.events.getOnString_(type), proxy);
-  var srcUid = goog.getUid(src);
-  if(goog.events.sources_[srcUid]) {
-    var sourcesArray = goog.events.sources_[srcUid];
-    goog.array.remove(sourcesArray, listener);
-    0 == sourcesArray.length && delete goog.events.sources_[srcUid]
-  }
-  listener.markAsRemoved();
-  var listenerArray = goog.events.listenerTree_[type][capture][srcUid];
-  listenerArray && (listenerArray.needsCleanup_ = !0, goog.events.cleanUp_(type, capture, srcUid, listenerArray));
-  delete goog.events.listeners_[listener.key];
-  return!0
-};
-goog.events.unlistenWithWrapper = function(src, wrapper, listener, opt_capt, opt_handler) {
-  wrapper.unlisten(src, listener, opt_capt, opt_handler)
-};
-goog.events.cleanUp = function(listenableKey) {
-  delete goog.events.listeners_[listenableKey.key]
-};
-goog.events.cleanUp_ = function(type, capture, srcUid, listenerArray) {
-  if(!listenerArray.locked_ && listenerArray.needsCleanup_) {
-    for(var oldIndex = 0, newIndex = 0;oldIndex < listenerArray.length;oldIndex++) {
-      listenerArray[oldIndex].removed || (oldIndex != newIndex && (listenerArray[newIndex] = listenerArray[oldIndex]), newIndex++)
-    }
-    listenerArray.length = newIndex;
-    listenerArray.needsCleanup_ = !1;
-    0 == newIndex && (delete goog.events.listenerTree_[type][capture][srcUid], goog.events.listenerTree_[type][capture].count_--, 0 == goog.events.listenerTree_[type][capture].count_ && (delete goog.events.listenerTree_[type][capture], goog.events.listenerTree_[type].count_--), 0 == goog.events.listenerTree_[type].count_ && delete goog.events.listenerTree_[type])
-  }
-};
-goog.events.removeAll = function(opt_obj, opt_type) {
-  var count = 0, noObj = null == opt_obj, noType = null == opt_type;
-  if(noObj) {
-    goog.object.forEach(goog.events.listeners_, function(listener) {
-      goog.events.unlistenByKey(listener);
-      count++
-    })
-  }else {
-    if(opt_obj && goog.events.Listenable.isImplementedBy(opt_obj)) {
-      return opt_obj.removeAllListeners(opt_type)
-    }
-    var srcUid = goog.getUid(opt_obj);
-    if(goog.events.sources_[srcUid]) {
-      for(var sourcesArray = goog.events.sources_[srcUid], i = sourcesArray.length - 1;0 <= i;i--) {
-        var listener$$0 = sourcesArray[i];
-        if(noType || opt_type == listener$$0.type) {
-          goog.events.unlistenByKey(listener$$0), count++
-        }
-      }
-    }
-  }
-  return count
-};
-goog.events.removeAllNativeListeners = function() {
-  var count = 0;
-  goog.object.forEach(goog.events.listeners_, function(listener) {
-    var src = listener.src;
-    goog.events.Listenable.isImplementedBy(src) || (goog.events.unlistenByKey(listener), count++)
-  });
-  return count
-};
-goog.events.getListeners = function(obj, type, capture) {
-  return goog.events.Listenable.isImplementedBy(obj) ? obj.getListeners(type, capture) : goog.events.getListeners_(obj, type, capture) || []
-};
-goog.events.getListeners_ = function(obj, type, capture) {
-  var map = goog.events.listenerTree_;
-  if(type in map && (map = map[type], capture in map)) {
-    var map = map[capture], objUid = goog.getUid(obj);
-    if(map[objUid]) {
-      return map[objUid]
-    }
+goog.log = {};
+goog.log.ENABLED = goog.debug.LOGGING_ENABLED;
+goog.log.Logger = goog.debug.Logger;
+goog.log.Level = goog.debug.Logger.Level;
+goog.log.LogRecord = goog.debug.LogRecord;
+goog.log.getLogger = function(name, opt_level) {
+  if(goog.log.ENABLED) {
+    var logger = goog.debug.Logger.getLogger(name);
+    opt_level && logger && logger.setLevel(opt_level);
+    return logger
   }
   return null
 };
-goog.events.getListener = function(src, type, listener, opt_capt, opt_handler) {
-  var capture = !!opt_capt;
-  listener = goog.events.wrapListener_(listener);
-  if(goog.events.Listenable.isImplementedBy(src)) {
-    return src.getListener(type, listener, capture, opt_handler)
-  }
-  var listenerArray = goog.events.getListeners_(src, type, capture);
-  if(listenerArray) {
-    for(var i = 0;i < listenerArray.length;i++) {
-      if(!listenerArray[i].removed && listenerArray[i].listener == listener && listenerArray[i].capture == capture && listenerArray[i].handler == opt_handler) {
-        return listenerArray[i]
-      }
-    }
-  }
-  return null
+goog.log.addHandler = function(logger, handler) {
+  goog.log.ENABLED && logger && logger.addHandler(handler)
 };
-goog.events.hasListener = function(obj, opt_type, opt_capture) {
-  if(goog.events.Listenable.isImplementedBy(obj)) {
-    return obj.hasListener(opt_type, opt_capture)
-  }
-  var objUid = goog.getUid(obj), listeners = goog.events.sources_[objUid];
-  if(listeners) {
-    var hasType = goog.isDef(opt_type), hasCapture = goog.isDef(opt_capture);
-    if(hasType && hasCapture) {
-      var map = goog.events.listenerTree_[opt_type];
-      return!!map && !!map[opt_capture] && objUid in map[opt_capture]
-    }
-    return hasType || hasCapture ? goog.array.some(listeners, function(listener) {
-      return hasType && listener.type == opt_type || hasCapture && listener.capture == opt_capture
-    }) : !0
-  }
-  return!1
+goog.log.removeHandler = function(logger, handler) {
+  return goog.log.ENABLED && logger ? logger.removeHandler(handler) : !1
 };
-goog.events.expose = function(e) {
-  var str = [], key;
-  for(key in e) {
-    e[key] && e[key].id ? str.push(key + " = " + e[key] + " (" + e[key].id + ")") : str.push(key + " = " + e[key])
-  }
-  return str.join("\n")
+goog.log.log = function(logger, level, msg, opt_exception) {
+  goog.log.ENABLED && logger && logger.log(level, msg, opt_exception)
 };
-goog.events.getOnString_ = function(type) {
-  return type in goog.events.onStringMap_ ? goog.events.onStringMap_[type] : goog.events.onStringMap_[type] = goog.events.onString_ + type
+goog.log.error = function(logger, msg, opt_exception) {
+  goog.log.ENABLED && logger && logger.severe(msg, opt_exception)
 };
-goog.events.fireListeners = function(obj, type, capture, eventObject) {
-  if(goog.events.Listenable.isImplementedBy(obj)) {
-    return obj.fireListeners(type, capture, eventObject)
-  }
-  var map = goog.events.listenerTree_;
-  return type in map && (map = map[type], capture in map) ? goog.events.fireListeners_(map[capture], obj, type, capture, eventObject) : !0
+goog.log.warning = function(logger, msg, opt_exception) {
+  goog.log.ENABLED && logger && logger.warning(msg, opt_exception)
 };
-goog.events.fireListeners_ = function(map, obj, type, capture, eventObject) {
-  var retval = 1, objUid = goog.getUid(obj);
-  if(map[objUid]) {
-    var remaining = --map.remaining_, listenerArray = map[objUid];
-    listenerArray.locked_ ? listenerArray.locked_++ : listenerArray.locked_ = 1;
-    try {
-      for(var length = listenerArray.length, i = 0;i < length;i++) {
-        var listener = listenerArray[i];
-        listener && !listener.removed && (retval &= !1 !== goog.events.fireListener(listener, eventObject))
-      }
-    }finally {
-      map.remaining_ = Math.max(remaining, map.remaining_), listenerArray.locked_--, goog.events.cleanUp_(type, capture, objUid, listenerArray)
-    }
-  }
-  return Boolean(retval)
+goog.log.info = function(logger, msg, opt_exception) {
+  goog.log.ENABLED && logger && logger.info(msg, opt_exception)
 };
-goog.events.fireListener = function(listener, eventObject) {
-  var listenerFn = listener.listener, listenerHandler = listener.handler || listener.src;
-  listener.callOnce && goog.events.unlistenByKey(listener);
-  return listenerFn.call(listenerHandler, eventObject)
-};
-goog.events.getTotalListenerCount = function() {
-  return goog.object.getCount(goog.events.listeners_)
-};
-goog.events.dispatchEvent = function(src, e) {
-  goog.asserts.assert(goog.events.Listenable.isImplementedBy(src), "Can not use goog.events.dispatchEvent with non-goog.events.Listenable instance.");
-  return src.dispatchEvent(e)
-};
-goog.events.protectBrowserEventEntryPoint = function(errorHandler) {
-  goog.events.handleBrowserEvent_ = errorHandler.protectEntryPoint(goog.events.handleBrowserEvent_)
-};
-goog.events.handleBrowserEvent_ = function(listener, opt_evt) {
-  if(listener.removed) {
-    return!0
-  }
-  var type = listener.type, map = goog.events.listenerTree_;
-  if(!(type in map)) {
-    return!0
-  }
-  var map = map[type], retval, targetsMap;
-  if(!goog.events.BrowserFeature.HAS_W3C_EVENT_SUPPORT) {
-    var ieEvent = opt_evt || goog.getObjectByName("window.event"), hasCapture = !0 in map, hasBubble = !1 in map;
-    if(hasCapture) {
-      if(goog.events.isMarkedIeEvent_(ieEvent)) {
-        return!0
-      }
-      goog.events.markIeEvent_(ieEvent)
-    }
-    var evt = new goog.events.BrowserEvent;
-    evt.init(ieEvent, this);
-    retval = !0;
-    try {
-      if(hasCapture) {
-        for(var ancestors = [], parent = evt.currentTarget;parent;parent = parent.parentNode) {
-          ancestors.push(parent)
-        }
-        targetsMap = map[!0];
-        targetsMap.remaining_ = targetsMap.count_;
-        for(var i = ancestors.length - 1;!evt.propagationStopped_ && 0 <= i && targetsMap.remaining_;i--) {
-          evt.currentTarget = ancestors[i], retval &= goog.events.fireListeners_(targetsMap, ancestors[i], type, !0, evt)
-        }
-        if(hasBubble) {
-          for(targetsMap = map[!1], targetsMap.remaining_ = targetsMap.count_, i = 0;!evt.propagationStopped_ && i < ancestors.length && targetsMap.remaining_;i++) {
-            evt.currentTarget = ancestors[i], retval &= goog.events.fireListeners_(targetsMap, ancestors[i], type, !1, evt)
-          }
-        }
-      }else {
-        retval = goog.events.fireListener(listener, evt)
-      }
-    }finally {
-      ancestors && (ancestors.length = 0)
-    }
-    return retval
-  }
-  var be = new goog.events.BrowserEvent(opt_evt, this);
-  return retval = goog.events.fireListener(listener, be)
-};
-goog.events.markIeEvent_ = function(e) {
-  var useReturnValue = !1;
-  if(0 == e.keyCode) {
-    try {
-      e.keyCode = -1;
-      return
-    }catch(ex) {
-      useReturnValue = !0
-    }
-  }
-  if(useReturnValue || void 0 == e.returnValue) {
-    e.returnValue = !0
-  }
-};
-goog.events.isMarkedIeEvent_ = function(e) {
-  return 0 > e.keyCode || void 0 != e.returnValue
-};
-goog.events.uniqueIdCounter_ = 0;
-goog.events.getUniqueId = function(identifier) {
-  return identifier + "_" + goog.events.uniqueIdCounter_++
-};
-goog.events.LISTENER_WRAPPER_PROP_ = "__closure_events_fn_" + (1E9 * Math.random() >>> 0);
-goog.events.wrapListener_ = function(listener) {
-  goog.asserts.assert(listener, "Listener can not be null.");
-  if(goog.isFunction(listener)) {
-    return listener
-  }
-  goog.asserts.assert(listener.handleEvent, "An object listener must have handleEvent method.");
-  return listener[goog.events.LISTENER_WRAPPER_PROP_] || (listener[goog.events.LISTENER_WRAPPER_PROP_] = function(e) {
-    return listener.handleEvent(e)
-  })
-};
-goog.debug.entryPointRegistry.register(function(transformer) {
-  goog.events.handleBrowserEvent_ = transformer(goog.events.handleBrowserEvent_)
-});
-goog.events.EventTarget = function() {
-  goog.Disposable.call(this);
-  this.eventTargetListeners_ = {};
-  this.actualEventTarget_ = this
-};
-goog.inherits(goog.events.EventTarget, goog.Disposable);
-goog.events.Listenable.addImplementation(goog.events.EventTarget);
-goog.events.EventTarget.MAX_ANCESTORS_ = 1E3;
-goog.events.EventTarget.prototype.parentEventTarget_ = null;
-goog.events.EventTarget.prototype.getParentEventTarget = function() {
-  return this.parentEventTarget_
-};
-goog.events.EventTarget.prototype.addEventListener = function(type, handler, opt_capture, opt_handlerScope) {
-  goog.events.listen(this, type, handler, opt_capture, opt_handlerScope)
-};
-goog.events.EventTarget.prototype.removeEventListener = function(type, handler, opt_capture, opt_handlerScope) {
-  goog.events.unlisten(this, type, handler, opt_capture, opt_handlerScope)
-};
-goog.events.EventTarget.prototype.dispatchEvent = function(e) {
-  this.assertInitialized_();
-  var ancestorsTree, ancestor = this.getParentEventTarget();
-  if(ancestor) {
-    ancestorsTree = [];
-    for(var ancestorCount = 1;ancestor;ancestor = ancestor.getParentEventTarget()) {
-      ancestorsTree.push(ancestor), goog.asserts.assert(++ancestorCount < goog.events.EventTarget.MAX_ANCESTORS_, "infinite loop")
-    }
-  }
-  return goog.events.EventTarget.dispatchEventInternal_(this.actualEventTarget_, e, ancestorsTree)
-};
-goog.events.EventTarget.prototype.disposeInternal = function() {
-  goog.events.EventTarget.superClass_.disposeInternal.call(this);
-  this.removeAllListeners();
-  this.parentEventTarget_ = null
-};
-goog.events.EventTarget.prototype.assertInitialized_ = function() {
-  goog.asserts.assert(this.eventTargetListeners_, "Event target is not initialized. Did you call superclass (goog.events.EventTarget) constructor?")
-};
-goog.events.EventTarget.prototype.listen = function(type, listener, opt_useCapture, opt_listenerScope) {
-  return this.listenInternal_(type, listener, !1, opt_useCapture, opt_listenerScope)
-};
-goog.events.EventTarget.prototype.listenOnce = function(type, listener, opt_useCapture, opt_listenerScope) {
-  return this.listenInternal_(type, listener, !0, opt_useCapture, opt_listenerScope)
-};
-goog.events.EventTarget.prototype.listenInternal_ = function(type, listener, callOnce, opt_useCapture, opt_listenerScope) {
-  this.assertInitialized_();
-  var listenerArray = this.eventTargetListeners_[type] || (this.eventTargetListeners_[type] = []), listenerObj, index = goog.events.EventTarget.findListenerIndex_(listenerArray, listener, opt_useCapture, opt_listenerScope);
-  if(-1 < index) {
-    return listenerObj = listenerArray[index], callOnce || (listenerObj.callOnce = !1), listenerObj
-  }
-  listenerObj = new goog.events.Listener(listener, null, this, type, !!opt_useCapture, opt_listenerScope);
-  listenerObj.callOnce = callOnce;
-  listenerArray.push(listenerObj);
-  return listenerObj
-};
-goog.events.EventTarget.prototype.unlisten = function(type, listener, opt_useCapture, opt_listenerScope) {
-  if(!(type in this.eventTargetListeners_)) {
-    return!1
-  }
-  var listenerArray = this.eventTargetListeners_[type], index = goog.events.EventTarget.findListenerIndex_(listenerArray, listener, opt_useCapture, opt_listenerScope);
-  if(-1 < index) {
-    var listenerObj = listenerArray[index];
-    goog.events.cleanUp(listenerObj);
-    listenerObj.removed = !0;
-    return goog.array.removeAt(listenerArray, index)
-  }
-  return!1
-};
-goog.events.EventTarget.prototype.unlistenByKey = function(key) {
-  var type = key.type;
-  if(!(type in this.eventTargetListeners_)) {
-    return!1
-  }
-  var removed = goog.array.remove(this.eventTargetListeners_[type], key);
-  removed && (goog.events.cleanUp(key), key.removed = !0);
-  return removed
-};
-goog.events.EventTarget.prototype.removeAllListeners = function(opt_type) {
-  var count = 0, type;
-  for(type in this.eventTargetListeners_) {
-    if(!opt_type || type == opt_type) {
-      for(var listenerArray = this.eventTargetListeners_[type], i = 0;i < listenerArray.length;i++) {
-        ++count, goog.events.cleanUp(listenerArray[i]), listenerArray[i].removed = !0
-      }
-      listenerArray.length = 0
-    }
-  }
-  return count
-};
-goog.events.EventTarget.prototype.fireListeners = function(type, capture, eventObject) {
-  if(!(type in this.eventTargetListeners_)) {
-    return!0
-  }
-  for(var rv = !0, listenerArray = goog.array.clone(this.eventTargetListeners_[type]), i = 0;i < listenerArray.length;++i) {
-    var listener = listenerArray[i];
-    if(listener && !listener.removed && listener.capture == capture) {
-      var listenerFn = listener.listener, listenerHandler = listener.handler || listener.src;
-      listener.callOnce && this.unlistenByKey(listener);
-      rv = !1 !== listenerFn.call(listenerHandler, eventObject) && rv
-    }
-  }
-  return rv && !1 != eventObject.returnValue_
-};
-goog.events.EventTarget.prototype.getListeners = function(type, capture) {
-  var listenerArray = this.eventTargetListeners_[type], rv = [];
-  if(listenerArray) {
-    for(var i = 0;i < listenerArray.length;++i) {
-      var listenerObj = listenerArray[i];
-      listenerObj.capture == capture && rv.push(listenerObj)
-    }
-  }
-  return rv
-};
-goog.events.EventTarget.prototype.getListener = function(type, listener, capture, opt_listenerScope) {
-  var listenerArray = this.eventTargetListeners_[type], i = -1;
-  listenerArray && (i = goog.events.EventTarget.findListenerIndex_(listenerArray, listener, capture, opt_listenerScope));
-  return-1 < i ? listenerArray[i] : null
-};
-goog.events.EventTarget.prototype.hasListener = function(opt_type, opt_capture) {
-  var hasType = goog.isDef(opt_type), hasCapture = goog.isDef(opt_capture);
-  return goog.object.some(this.eventTargetListeners_, function(listenersArray) {
-    for(var i = 0;i < listenersArray.length;++i) {
-      if(!(hasType && listenersArray[i].type != opt_type || hasCapture && listenersArray[i].capture != opt_capture)) {
-        return!0
-      }
-    }
-    return!1
-  })
-};
-goog.events.EventTarget.dispatchEventInternal_ = function(target, e, opt_ancestorsTree) {
-  var type = e.type || e;
-  if(goog.isString(e)) {
-    e = new goog.events.Event(e, target)
-  }else {
-    if(e instanceof goog.events.Event) {
-      e.target = e.target || target
-    }else {
-      var oldEvent = e;
-      e = new goog.events.Event(type, target);
-      goog.object.extend(e, oldEvent)
-    }
-  }
-  var rv = !0, currentTarget;
-  if(opt_ancestorsTree) {
-    for(var i = opt_ancestorsTree.length - 1;!e.propagationStopped_ && 0 <= i;i--) {
-      currentTarget = e.currentTarget = opt_ancestorsTree[i], rv = currentTarget.fireListeners(type, !0, e) && rv
-    }
-  }
-  e.propagationStopped_ || (currentTarget = e.currentTarget = target, rv = currentTarget.fireListeners(type, !0, e) && rv, e.propagationStopped_ || (rv = currentTarget.fireListeners(type, !1, e) && rv));
-  if(opt_ancestorsTree) {
-    for(i = 0;!e.propagationStopped_ && i < opt_ancestorsTree.length;i++) {
-      currentTarget = e.currentTarget = opt_ancestorsTree[i], rv = currentTarget.fireListeners(type, !1, e) && rv
-    }
-  }
-  return rv
-};
-goog.events.EventTarget.findListenerIndex_ = function(listenerArray, listener, opt_useCapture, opt_listenerScope) {
-  for(var i = 0;i < listenerArray.length;++i) {
-    var listenerObj = listenerArray[i];
-    if(listenerObj.listener == listener && listenerObj.capture == !!opt_useCapture && listenerObj.handler == opt_listenerScope) {
-      return i
-    }
-  }
-  return-1
-};
-goog.json = {};
-goog.json.isValid_ = function(s) {
-  if(/^\s*$/.test(s)) {
-    return!1
-  }
-  var backslashesRe = /\\["\\\/bfnrtu]/g, simpleValuesRe = /"[^"\\\n\r\u2028\u2029\x00-\x08\x0a-\x1f]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, openBracketsRe = /(?:^|:|,)(?:[\s\u2028\u2029]*\[)+/g, remainderRe = /^[\],:{}\s\u2028\u2029]*$/;
-  return remainderRe.test(s.replace(backslashesRe, "@").replace(simpleValuesRe, "]").replace(openBracketsRe, ""))
-};
-goog.json.parse = function(s) {
-  var o = String(s);
-  if(goog.json.isValid_(o)) {
-    try {
-      return eval("(" + o + ")")
-    }catch(ex) {
-    }
-  }
-  throw Error("Invalid JSON string: " + o);
-};
-goog.json.unsafeParse = function(s) {
-  return eval("(" + s + ")")
-};
-goog.json.serialize = function(object, opt_replacer) {
-  return(new goog.json.Serializer(opt_replacer)).serialize(object)
-};
-goog.json.Serializer = function(opt_replacer) {
-  this.replacer_ = opt_replacer
-};
-goog.json.Serializer.prototype.serialize = function(object) {
-  var sb = [];
-  this.serialize_(object, sb);
-  return sb.join("")
-};
-goog.json.Serializer.prototype.serialize_ = function(object, sb) {
-  switch(typeof object) {
-    case "string":
-      this.serializeString_(object, sb);
-      break;
-    case "number":
-      this.serializeNumber_(object, sb);
-      break;
-    case "boolean":
-      sb.push(object);
-      break;
-    case "undefined":
-      sb.push("null");
-      break;
-    case "object":
-      if(null == object) {
-        sb.push("null");
-        break
-      }
-      if(goog.isArray(object)) {
-        this.serializeArray(object, sb);
-        break
-      }
-      this.serializeObject_(object, sb);
-      break;
-    case "function":
-      break;
-    default:
-      throw Error("Unknown type: " + typeof object);
-  }
-};
-goog.json.Serializer.charToJsonCharCache_ = {'"':'\\"', "\\":"\\\\", "/":"\\/", "\b":"\\b", "\f":"\\f", "\n":"\\n", "\r":"\\r", "\t":"\\t", "\x0B":"\\u000b"};
-goog.json.Serializer.charsToReplace_ = /\uffff/.test("\uffff") ? /[\\\"\x00-\x1f\x7f-\uffff]/g : /[\\\"\x00-\x1f\x7f-\xff]/g;
-goog.json.Serializer.prototype.serializeString_ = function(s, sb) {
-  sb.push('"', s.replace(goog.json.Serializer.charsToReplace_, function(c) {
-    if(c in goog.json.Serializer.charToJsonCharCache_) {
-      return goog.json.Serializer.charToJsonCharCache_[c]
-    }
-    var cc = c.charCodeAt(0), rv = "\\u";
-    16 > cc ? rv += "000" : 256 > cc ? rv += "00" : 4096 > cc && (rv += "0");
-    return goog.json.Serializer.charToJsonCharCache_[c] = rv + cc.toString(16)
-  }), '"')
-};
-goog.json.Serializer.prototype.serializeNumber_ = function(n, sb) {
-  sb.push(isFinite(n) && !isNaN(n) ? n : "null")
-};
-goog.json.Serializer.prototype.serializeArray = function(arr, sb) {
-  var l = arr.length;
-  sb.push("[");
-  for(var sep = "", i = 0;i < l;i++) {
-    sb.push(sep);
-    var value = arr[i];
-    this.serialize_(this.replacer_ ? this.replacer_.call(arr, String(i), value) : value, sb);
-    sep = ","
-  }
-  sb.push("]")
-};
-goog.json.Serializer.prototype.serializeObject_ = function(obj, sb) {
-  sb.push("{");
-  var sep = "", key;
-  for(key in obj) {
-    if(Object.prototype.hasOwnProperty.call(obj, key)) {
-      var value = obj[key];
-      "function" != typeof value && (sb.push(sep), this.serializeString_(key, sb), sb.push(":"), this.serialize_(this.replacer_ ? this.replacer_.call(obj, key, value) : value, sb), sep = ",")
-    }
-  }
-  sb.push("}")
+goog.log.fine = function(logger, msg, opt_exception) {
+  goog.log.ENABLED && logger && logger.fine(msg, opt_exception)
 };
 goog.Timer = function(opt_interval, opt_timerObject) {
   goog.events.EventTarget.call(this);
@@ -4763,7 +4812,7 @@
 };
 goog.inherits(goog.net.XhrIo, goog.events.EventTarget);
 goog.net.XhrIo.ResponseType = {DEFAULT:"", TEXT:"text", DOCUMENT:"document", BLOB:"blob", ARRAY_BUFFER:"arraybuffer"};
-goog.net.XhrIo.prototype.logger_ = goog.debug.Logger.getLogger("goog.net.XhrIo");
+goog.net.XhrIo.prototype.logger_ = goog.log.getLogger("goog.net.XhrIo");
 goog.net.XhrIo.CONTENT_TYPE_HEADER = "Content-Type";
 goog.net.XhrIo.HTTP_SCHEME_PATTERN = /^https?$/i;
 goog.net.XhrIo.METHODS_WITH_FORM_DATA = ["POST", "PUT"];
@@ -4812,9 +4861,9 @@
   this.xhrOptions_ = this.xmlHttpFactory_ ? this.xmlHttpFactory_.getOptions() : goog.net.XmlHttp.getOptions();
   this.xhr_.onreadystatechange = goog.bind(this.onReadyStateChange_, this);
   try {
-    this.logger_.fine(this.formatMsg_("Opening Xhr")), this.inOpen_ = !0, this.xhr_.open(method, url, !0), this.inOpen_ = !1
+    goog.log.fine(this.logger_, this.formatMsg_("Opening Xhr")), this.inOpen_ = !0, this.xhr_.open(method, url, !0), this.inOpen_ = !1
   }catch(err) {
-    this.logger_.fine(this.formatMsg_("Error opening Xhr: " + err.message));
+    goog.log.fine(this.logger_, this.formatMsg_("Error opening Xhr: " + err.message));
     this.error_(goog.net.ErrorCode.EXCEPTION, err);
     return
   }
@@ -4830,10 +4879,10 @@
   this.responseType_ && (this.xhr_.responseType = this.responseType_);
   goog.object.containsKey(this.xhr_, "withCredentials") && (this.xhr_.withCredentials = this.withCredentials_);
   try {
-    this.cleanUpTimeoutTimer_(), 0 < this.timeoutInterval_ && (this.useXhr2Timeout_ = goog.net.XhrIo.shouldUseXhr2Timeout_(this.xhr_), this.logger_.fine(this.formatMsg_("Will abort after " + this.timeoutInterval_ + "ms if incomplete, xhr2 " + this.useXhr2Timeout_)), this.useXhr2Timeout_ ? (this.xhr_[goog.net.XhrIo.XHR2_TIMEOUT_] = this.timeoutInterval_, this.xhr_[goog.net.XhrIo.XHR2_ON_TIMEOUT_] = goog.bind(this.timeout_, this)) : this.timeoutId_ = goog.Timer.callOnce(this.timeout_, this.timeoutInterval_, 
-    this)), this.logger_.fine(this.formatMsg_("Sending request")), this.inSend_ = !0, this.xhr_.send(content), this.inSend_ = !1
+    this.cleanUpTimeoutTimer_(), 0 < this.timeoutInterval_ && (this.useXhr2Timeout_ = goog.net.XhrIo.shouldUseXhr2Timeout_(this.xhr_), goog.log.fine(this.logger_, this.formatMsg_("Will abort after " + this.timeoutInterval_ + "ms if incomplete, xhr2 " + this.useXhr2Timeout_)), this.useXhr2Timeout_ ? (this.xhr_[goog.net.XhrIo.XHR2_TIMEOUT_] = this.timeoutInterval_, this.xhr_[goog.net.XhrIo.XHR2_ON_TIMEOUT_] = goog.bind(this.timeout_, this)) : this.timeoutId_ = goog.Timer.callOnce(this.timeout_, this.timeoutInterval_, 
+    this)), goog.log.fine(this.logger_, this.formatMsg_("Sending request")), this.inSend_ = !0, this.xhr_.send(content), this.inSend_ = !1
   }catch(err$$0) {
-    this.logger_.fine(this.formatMsg_("Send error: " + err$$0.message)), this.error_(goog.net.ErrorCode.EXCEPTION, err$$0)
+    goog.log.fine(this.logger_, this.formatMsg_("Send error: " + err$$0.message)), this.error_(goog.net.ErrorCode.EXCEPTION, err$$0)
   }
 };
 goog.net.XhrIo.shouldUseXhr2Timeout_ = function(xhr) {
@@ -4846,7 +4895,7 @@
   return this.xmlHttpFactory_ ? this.xmlHttpFactory_.createInstance() : goog.net.XmlHttp()
 };
 goog.net.XhrIo.prototype.timeout_ = function() {
-  "undefined" != typeof goog && this.xhr_ && (this.lastError_ = "Timed out after " + this.timeoutInterval_ + "ms, aborting", this.logger_.fine(this.formatMsg_(this.lastError_)), this.dispatchEvent(goog.net.EventType.TIMEOUT), this.abort(goog.net.ErrorCode.TIMEOUT))
+  "undefined" != typeof goog && this.xhr_ && (this.lastError_ = "Timed out after " + this.timeoutInterval_ + "ms, aborting", goog.log.fine(this.logger_, this.formatMsg_(this.lastError_)), this.dispatchEvent(goog.net.EventType.TIMEOUT), this.abort(goog.net.ErrorCode.TIMEOUT))
 };
 goog.net.XhrIo.prototype.error_ = function(errorCode, err) {
   this.active_ = !1;
@@ -4859,7 +4908,7 @@
   this.errorDispatched_ || (this.errorDispatched_ = !0, this.dispatchEvent(goog.net.EventType.COMPLETE), this.dispatchEvent(goog.net.EventType.ERROR))
 };
 goog.net.XhrIo.prototype.abort = function() {
-  this.xhr_ && this.active_ && (this.logger_.fine(this.formatMsg_("Aborting")), this.active_ = !1, this.inAbort_ = !0, this.xhr_.abort(), this.inAbort_ = !1, this.dispatchEvent(goog.net.EventType.COMPLETE), this.dispatchEvent(goog.net.EventType.ABORT), this.cleanUpXhr_())
+  this.xhr_ && this.active_ && (goog.log.fine(this.logger_, this.formatMsg_("Aborting")), this.active_ = !1, this.inAbort_ = !0, this.xhr_.abort(), this.inAbort_ = !1, this.dispatchEvent(goog.net.EventType.COMPLETE), this.dispatchEvent(goog.net.EventType.ABORT), this.cleanUpXhr_())
 };
 goog.net.XhrIo.prototype.disposeInternal = function() {
   this.xhr_ && (this.active_ && (this.active_ = !1, this.inAbort_ = !0, this.xhr_.abort(), this.inAbort_ = !1), this.cleanUpXhr_(!0));
@@ -4880,13 +4929,13 @@
 goog.net.XhrIo.prototype.onReadyStateChangeHelper_ = function() {
   if(this.active_ && "undefined" != typeof goog) {
     if(this.xhrOptions_[goog.net.XmlHttp.OptionType.LOCAL_REQUEST_ERROR] && this.getReadyState() == goog.net.XmlHttp.ReadyState.COMPLETE && 2 == this.getStatus()) {
-      this.logger_.fine(this.formatMsg_("Local request error detected and ignored"))
+      goog.log.fine(this.logger_, this.formatMsg_("Local request error detected and ignored"))
     }else {
       if(this.inSend_ && this.getReadyState() == goog.net.XmlHttp.ReadyState.COMPLETE) {
         goog.Timer.callOnce(this.onReadyStateChange_, 0, this)
       }else {
         if(this.dispatchEvent(goog.net.EventType.READY_STATE_CHANGE), this.isComplete()) {
-          this.logger_.fine(this.formatMsg_("Request complete"));
+          goog.log.fine(this.logger_, this.formatMsg_("Request complete"));
           this.active_ = !1;
           try {
             this.isSuccess() ? (this.dispatchEvent(goog.net.EventType.COMPLETE), this.dispatchEvent(goog.net.EventType.SUCCESS)) : (this.lastError_ = this.getStatusText() + " [" + this.getStatus() + "]", this.dispatchErrors_())
@@ -4907,7 +4956,7 @@
     try {
       xhr.onreadystatechange = clearedOnReadyStateChange
     }catch(e) {
-      this.logger_.severe("Problem encountered resetting onreadystatechange: " + e.message)
+      goog.log.error(this.logger_, "Problem encountered resetting onreadystatechange: " + e.message)
     }
   }
 };
@@ -4933,21 +4982,21 @@
   try {
     return this.getReadyState() > goog.net.XmlHttp.ReadyState.LOADED ? this.xhr_.status : -1
   }catch(e) {
-    return this.logger_.warning("Can not get status: " + e.message), -1
+    return goog.log.warning(this.logger_, "Can not get status: " + e.message), -1
   }
 };
 goog.net.XhrIo.prototype.getStatusText = function() {
   try {
     return this.getReadyState() > goog.net.XmlHttp.ReadyState.LOADED ? this.xhr_.statusText : ""
   }catch(e) {
-    return this.logger_.fine("Can not get status: " + e.message), ""
+    return goog.log.fine(this.logger_, "Can not get status: " + e.message), ""
   }
 };
 goog.net.XhrIo.prototype.getResponseText = function() {
   try {
     return this.xhr_ ? this.xhr_.responseText : ""
   }catch(e) {
-    return this.logger_.fine("Can not get responseText: " + e.message), ""
+    return goog.log.fine(this.logger_, "Can not get responseText: " + e.message), ""
   }
 };
 goog.net.XhrIo.prototype.getResponseHeader = function(key) {
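The goog.log changes above replace direct goog.debug.Logger method calls (logger_.fine(...), logger_.severe(...)) with static wrappers that are gated on goog.log.ENABLED and tolerate a null logger, so a build can compile logging out entirely and call sites need no guards of their own. A sketch of the same flag-gated, null-safe wrapper pattern in Python for clarity (illustrative names only; this is not SDK code):

    import logging

    ENABLED = True  # stand-in for the goog.log.ENABLED compile-time flag

    def get_logger(name, level=None):
      # Mirrors goog.log.getLogger: returns None when logging is disabled.
      if not ENABLED:
        return None
      logger = logging.getLogger(name)
      if level is not None:
        logger.setLevel(level)
      return logger

    def fine(logger, msg):
      # Mirrors goog.log.fine: the wrapper absorbs both the disabled case and
      # a None logger, so callers never branch on logging state themselves.
      if ENABLED and logger:
        logger.debug(msg)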
diff --git a/google/appengine/tools/dev_appserver.py b/google/appengine/tools/dev_appserver.py
index 4ab50fc..930e63c 100644
--- a/google/appengine/tools/dev_appserver.py
+++ b/google/appengine/tools/dev_appserver.py
@@ -1604,20 +1604,6 @@
     exec_py27_handler: Used for dependency injection.
   """
 
-
-
-
-
-
-
-
-
-  if handler_path == '_go_app':
-    from google.appengine.ext.go import execute_go_cgi
-    return execute_go_cgi(root_path, config, handler_path, cgi_path,
-        env, infile, outfile)
-
-
   old_module_dict = sys.modules.copy()
   old_builtin = __builtin__.__dict__.copy()
   old_argv = sys.argv
diff --git a/google/appengine/tools/dev_appserver_channel.py b/google/appengine/tools/dev_appserver_channel.py
index a6385a6..6e49bca 100644
--- a/google/appengine/tools/dev_appserver_channel.py
+++ b/google/appengine/tools/dev_appserver_channel.py
@@ -33,6 +33,9 @@
 import os
 import urlparse
 
+from google.appengine.api.channel.channel_service_stub import InvalidTokenError
+from google.appengine.api.channel.channel_service_stub import TokenTimedOutError
+
 
 
 CHANNEL_POLL_PATTERN = '/_ah/channel/dev(?:/.*)?'
@@ -105,15 +108,18 @@
         outfile.write(open(path).read())
       elif page == 'dev':
         token = param_dict['channel'][0]
-        (syntax_valid, time_valid) = (
-            self._channel_service_stub.check_token_validity(token))
-        if not (syntax_valid and time_valid):
+
+        token_error = None
+        try:
+          self._channel_service_stub.validate_token_and_extract_client_id(token)
 
 
-          if not syntax_valid:
-            token_error = 'Invalid+token.'
-          else:
-            token_error = 'Token+timed+out.'
+        except InvalidTokenError:
+          token_error = 'Invalid+token.'
+        except TokenTimedOutError:
+          token_error = 'Token+timed+out.'
+
+        if token_error is not None:
           outfile.write('Status: 401 %s\r\n\r\n' % token_error)
           return
 
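The change above replaces check_token_validity, which returned a (syntax_valid, time_valid) tuple, with validate_token_and_extract_client_id, which raises typed exceptions. A minimal sketch of the resulting call pattern, assuming only the stub API shown in this diff (token_error_for is a hypothetical helper name):

    from google.appengine.api.channel.channel_service_stub import (
        InvalidTokenError, TokenTimedOutError)

    def token_error_for(channel_service_stub, token):
      """Returns a '+'-encoded error message, or None when the token is valid."""
      try:
        channel_service_stub.validate_token_and_extract_client_id(token)
      except InvalidTokenError:
        return 'Invalid+token.'
      except TokenTimedOutError:
        return 'Token+timed+out.'
      return None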
diff --git a/google/appengine/tools/devappserver2/admin/cron_handler.py b/google/appengine/tools/devappserver2/admin/cron_handler.py
index 8483a10..f5cce08 100644
--- a/google/appengine/tools/devappserver2/admin/cron_handler.py
+++ b/google/appengine/tools/devappserver2/admin/cron_handler.py
@@ -75,7 +75,7 @@
     """
     for cron_yaml in ('cron.yaml', 'cron.yml'):
       try:
-        with open(os.path.join(self.configuration.servers[0].application_root,
+        with open(os.path.join(self.configuration.modules[0].application_root,
                                cron_yaml)) as f:
           cron_info = croninfo.LoadSingleCron(f)
           return cron_info
diff --git a/google/appengine/tools/devappserver2/api_server.py b/google/appengine/tools/devappserver2/api_server.py
index 3796cf6..1a86f59 100644
--- a/google/appengine/tools/devappserver2/api_server.py
+++ b/google/appengine/tools/devappserver2/api_server.py
@@ -218,7 +218,8 @@
     taskqueue_auto_run_tasks,
     taskqueue_default_http_server,
     user_login_url,
-    user_logout_url):
+    user_logout_url,
+    default_gcs_bucket_name):
   """Configures the APIs hosted by this server.
 
   Args:
@@ -270,11 +271,13 @@
     user_login_url: A str containing the url that should be used for user login.
     user_logout_url: A str containing the url that should be used for user
         logout.
+    default_gcs_bucket_name: A str, the bucket name to use as the app's
+        default Google Cloud Storage bucket.
   """
 
-  apiproxy_stub_map.apiproxy.RegisterStub(
-      'app_identity_service',
-      app_identity_stub.AppIdentityServiceStub())
+  identity_stub = app_identity_stub.AppIdentityServiceStub()
+  if default_gcs_bucket_name is not None:
+    identity_stub.SetDefaultGcsBucketName(default_gcs_bucket_name)
+  apiproxy_stub_map.apiproxy.RegisterStub('app_identity_service', identity_stub)
 
   blob_storage = file_blob_storage.FileBlobStorage(blobstore_path, app_id)
   apiproxy_stub_map.apiproxy.RegisterStub(
@@ -467,7 +470,8 @@
     taskqueue_auto_run_tasks=False,
     taskqueue_default_http_server='http://localhost:8080',
     user_login_url='/_ah/login?continue=%s',
-    user_logout_url='/_ah/login?continue=%s'):
+    user_logout_url='/_ah/login?continue=%s',
+    default_gcs_bucket_name=None):
   """Similar to setup_stubs with reasonable test defaults and recallable."""
 
   # Reset the stub map between requests because a stub map only allows a
@@ -500,7 +504,8 @@
               taskqueue_auto_run_tasks,
               taskqueue_default_http_server,
               user_login_url,
-              user_logout_url)
+              user_logout_url,
+              default_gcs_bucket_name)
 
 
 def cleanup_stubs():
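The new default_gcs_bucket_name argument threads from the --default_gcs_bucket_name flag down to AppIdentityServiceStub.SetDefaultGcsBucketName. A hedged usage sketch for local tests, assuming app_identity.get_default_gcs_bucket_name is available in this SDK build ('my-test-bucket' is an arbitrary example):

    from google.appengine.api import app_identity
    from google.appengine.tools.devappserver2 import api_server

    api_server.test_setup_stubs(default_gcs_bucket_name='my-test-bucket')
    try:
      # With the override in place, the app identity stub reports our bucket.
      assert app_identity.get_default_gcs_bucket_name() == 'my-test-bucket'
    finally:
      api_server.cleanup_stubs()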
diff --git a/google/appengine/tools/devappserver2/blob_download_test.py b/google/appengine/tools/devappserver2/blob_download_test.py
index 663730f..eb85461 100644
--- a/google/appengine/tools/devappserver2/blob_download_test.py
+++ b/google/appengine/tools/devappserver2/blob_download_test.py
@@ -471,7 +471,7 @@
 class BlobDownloadTestGoogleStorage(BlobDownloadTest):
   """Executes all of the superclass tests with a Google Storage object."""
 
-  def create_blob(self):
+  def create_blob(self, content_type='image/png'):
     """Create a GS object in the datastore and on disk.
 
     Overrides the superclass create_blob method.
@@ -482,12 +482,33 @@
     data = 'a blob'
     filename = '/some_bucket/some_object'
     stub = cloudstorage_stub.CloudStorageStub(self.blob_storage)
-    blob_key = stub.post_start_creation(filename, {'content-type': 'image/png'})
+    options = {}
+    if content_type:
+      options['content-type'] = content_type
+    blob_key = stub.post_start_creation(filename, options)
     stub.put_continue_creation(blob_key, data, (0, len(data) - 1), True)
     self.blob_storage.StoreBlob(blob_key, cStringIO.StringIO(data))
 
     return blob_key
 
+  def test_default_content_type(self):
+    """Tests downloads when upload does not specify content-type."""
+    blob_key = self.create_blob(content_type=None)
+
+    headers = [(blobstore.BLOB_KEY_HEADER, str(blob_key))]
+    state = request_rewriter.RewriterState({}, '200 original message', headers,
+                                           'original body')
+
+    blob_download.blobstore_download_rewriter(state)
+
+    self.assertEqual('200 original message', state.status)
+    expected_headers = {
+        'Content-Length': '6',
+        'Content-Type': cloudstorage_stub._GCS_DEFAULT_CONTENT_TYPE,
+    }
+    self.assertHeadersEqual(expected_headers, state.headers)
+    self.assertEqual('a blob', ''.join(state.body))
+
 
 class BlobDownloadIntegrationTest(DownloadTestBase,
                                   wsgi_test_utils.RewriterTestCase):
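test_default_content_type above pins down the fallback: a GCS object created without a content-type is served with the stub's default type. The asserted expectation, condensed into a sketch (this states the test's contract, not the stub's implementation):

    def effective_content_type(options):
      # 'options' is the creation-metadata dict passed to post_start_creation.
      return options.get('content-type',
                         cloudstorage_stub._GCS_DEFAULT_CONTENT_TYPE)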
diff --git a/google/appengine/tools/devappserver2/channel_test.py b/google/appengine/tools/devappserver2/channel_test.py
index a605f7b..95fedd5 100644
--- a/google/appengine/tools/devappserver2/channel_test.py
+++ b/google/appengine/tools/devappserver2/channel_test.py
@@ -55,11 +55,7 @@
     del self._messages[token]
 
   def connect_channel(self, token):
-    syntax_valid, time_valid = self.check_token_validity(token)
-    if not syntax_valid:
-      raise channel_service_stub.InvalidTokenError()
-    elif not time_valid:
-      raise channel_service_stub.TokenTimedOutError()
+    self.validate_token_and_extract_client_id(token)
     self._connected_channel_tokens.append(token)
 
   def disconnect_channel(self, client_id):
@@ -71,8 +67,12 @@
   def set_connected_tokens(self, tokens):
     self._connected_tokens = tokens
 
-  def check_token_validity(self, token):
-    return (token != 'bad', token != 'expired')
+  def validate_token_and_extract_client_id(self, token):
+    if token == 'bad':
+      raise channel_service_stub.InvalidTokenError()
+    if token == 'expired':
+      raise channel_service_stub.TokenTimedOutError()
+    return 'dummy-client-id'
 
   def connect_and_pop_first_message(self, token):
     self.connect_channel(token)
diff --git a/google/appengine/tools/devappserver2/devappserver2.py b/google/appengine/tools/devappserver2/devappserver2.py
index 039f7b1..332237b 100644
--- a/google/appengine/tools/devappserver2/devappserver2.py
+++ b/google/appengine/tools/devappserver2/devappserver2.py
@@ -156,7 +156,16 @@
           1. "5" - All modules are limited to 5 instances.
           2. "default:3,backend:20" - The default module can have 3 instances,
              "backend" can have 20 instances and all other modules are
-              unaffected.
+              unaffected. An empty name (e.g. ":3") is shorthand for the
+              default module, mirroring how omitting the module name in the
+              yaml is the same as specifying "module: default".
+  Returns:
+    The parsed value of the max_module_instances flag. May either be an int
+    (for values of the form "5") or a dict of str->int (for values of the
+    form "default:3,backend:20").
+
+  Raises:
+    argparse.ArgumentTypeError: the value is invalid.
   """
   if ':' not in value:
     try:
@@ -179,9 +188,14 @@
             'Expected "module:max_instances": %r' % module_instance_max)
       else:
         module_name = module_name.strip()
+        if not module_name:
+          module_name = 'default'
         if module_name in module_to_max_instances:
           raise argparse.ArgumentTypeError(
               'Duplicate max instance value: %r' % module_name)
+        if not max_instances:
+          raise argparse.ArgumentTypeError(
+              'Cannot specify zero instances for module %s' % module_name)
         module_to_max_instances[module_name] = max_instances
     return module_to_max_instances
 
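To make the accepted grammar concrete, here are the parses implied by the docstring and the new validation above (a sketch; the error cases mirror the unit tests later in this diff):

    from google.appengine.tools.devappserver2 import devappserver2

    devappserver2.parse_max_module_instances('5')    # -> 5 (all modules)
    devappserver2.parse_max_module_instances('default:3,backend:20')
    # -> {'default': 3, 'backend': 20}
    devappserver2.parse_max_module_instances(':10')  # -> {'default': 10}
    # Each of these raises argparse.ArgumentTypeError:
    #   'default:5,default:10'   (duplicate module name)
    #   'default:5,foo:0'        (zero instances)
    #   'default:'               (missing max_instances value)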
@@ -423,7 +437,7 @@
       action=boolean_action.BooleanAction,
       const=True,
       default=False,
-      help='make files specified in the app.yaml "skip_files" or "static"'
+      help='make files specified in the app.yaml "skip_files" or "static" '
       'handlers readable by the application.')
   misc_group.add_argument(
       '--api_port', type=PortParser(), default=0,
@@ -449,6 +463,9 @@
       default=False,
       help='skip checking for SDK updates (if false, use .appcfg_nag to '
       'decide)')
+  misc_group.add_argument(
+      '--default_gcs_bucket_name', default=None,
+      help='default Google Cloud Storage bucket name')
 
 
   return parser
@@ -554,6 +571,8 @@
       if options.python_startup_args:
         python_config.startup_args = options.python_startup_args
 
+    php_executable_path = (options.php_executable_path and
+                           os.path.abspath(options.php_executable_path))
     cloud_sql_config = runtime_config_pb2.CloudSQL()
     cloud_sql_config.mysql_host = options.mysql_host
     cloud_sql_config.mysql_port = options.mysql_port
@@ -577,7 +596,7 @@
         options.port,
         options.auth_domain,
         _LOG_LEVEL_TO_RUNTIME_CONSTANT[options.log_level],
-        options.php_executable_path,
+        php_executable_path,
         options.php_remote_debugging,
         python_config,
         cloud_sql_config,
@@ -662,7 +681,8 @@
         taskqueue_auto_run_tasks=options.enable_task_running,
         taskqueue_default_http_server=application_address,
         user_login_url=user_login_url,
-        user_logout_url=user_logout_url)
+        user_logout_url=user_logout_url,
+        default_gcs_bucket_name=options.default_gcs_bucket_name)
 
     # The APIServer must bind to localhost because that is what the runtime
     # instances talk to.
diff --git a/google/appengine/tools/devappserver2/devappserver2_test.py b/google/appengine/tools/devappserver2/devappserver2_test.py
index a45333f..885909f 100644
--- a/google/appengine/tools/devappserver2/devappserver2_test.py
+++ b/google/appengine/tools/devappserver2/devappserver2_test.py
@@ -215,6 +215,20 @@
         argparse.ArgumentTypeError,
         devappserver2.parse_max_module_instances, 'default:5,default:10')
 
+  def test_multiple_with_zero(self):
+    self.assertRaises(
+        argparse.ArgumentTypeError,
+        devappserver2.parse_max_module_instances, 'default:5,foo:0')
+
+  def test_multiple_missing_name(self):
+    self.assertEqual(
+        {'default': 10},
+        devappserver2.parse_max_module_instances(':10'))
+
+  def test_multiple_missing_value(self):
+    self.assertRaises(
+        argparse.ArgumentTypeError,
+        devappserver2.parse_max_module_instances, 'default:')
 
 if __name__ == '__main__':
   unittest.main()
diff --git a/google/appengine/tools/devappserver2/endpoints/api_config_manager.py b/google/appengine/tools/devappserver2/endpoints/api_config_manager.py
index 6b02cda..72577eb 100644
--- a/google/appengine/tools/devappserver2/endpoints/api_config_manager.py
+++ b/google/appengine/tools/devappserver2/endpoints/api_config_manager.py
@@ -38,9 +38,19 @@
   def __init__(self):
     self._rpc_method_dict = {}
     self._rest_methods = []
-    self.configs = {}
+    self._configs = {}
     self._config_lock = threading.Lock()
 
+  @property
+  def configs(self):
+    """Return a dict with the current configuration mappings.
+
+    Returns:
+      A dict with the current configuration mappings.
+    """
+    with self._config_lock:
+      return self._configs.copy()
+
   def _convert_https_to_http(self, config):
     """Switch the URLs in one API configuration to use HTTP instead of HTTPS.
 
@@ -89,9 +99,9 @@
           else:
             lookup_key = config.get('name', ''), config.get('version', '')
             self._convert_https_to_http(config)
-            self.configs[lookup_key] = config
+            self._configs[lookup_key] = config
 
-        for config in self.configs.itervalues():
+        for config in self._configs.itervalues():
           name = config.get('name', '')
           version = config.get('version', '')
           sorted_methods = self._get_sorted_methods(config.get('methods', {}))
@@ -241,9 +251,14 @@
     return method_name, method, params
 
   def _add_discovery_config(self):
+    """Add the Discovery configuration to our list of configs.
+
+    This should only be called while self._config_lock is held; the code
+    here assumes the lock is already acquired.
+    """
     lookup_key = (discovery_service.DiscoveryService.API_CONFIG['name'],
                   discovery_service.DiscoveryService.API_CONFIG['version'])
-    self.configs[lookup_key] = discovery_service.DiscoveryService.API_CONFIG
+    self._configs[lookup_key] = discovery_service.DiscoveryService.API_CONFIG
 
   @staticmethod
   def _to_safe_path_param_name(matched_parameter):
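The configs property above hands callers a copy made while _config_lock is held, so readers always see a stable snapshot and writers that also take the lock can never mutate a dict mid-iteration. The same copy-on-read pattern in isolation (a distilled sketch, not the Endpoints code):

    import threading

    class Registry(object):
      def __init__(self):
        self._configs = {}
        self._lock = threading.Lock()

      @property
      def configs(self):
        # Snapshot under the lock; mutations after this return cannot race
        # a caller iterating over the returned copy.
        with self._lock:
          return self._configs.copy()

      def add(self, key, config):
        with self._lock:
          self._configs[key] = config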
diff --git a/google/appengine/tools/devappserver2/endpoints/endpoints_server.py b/google/appengine/tools/devappserver2/endpoints/endpoints_server.py
index 77d9e0f..5ea4ef7 100644
--- a/google/appengine/tools/devappserver2/endpoints/endpoints_server.py
+++ b/google/appengine/tools/devappserver2/endpoints/endpoints_server.py
@@ -38,6 +38,7 @@
 from google.appengine.tools.devappserver2.endpoints import discovery_api_proxy
 from google.appengine.tools.devappserver2.endpoints import discovery_service
 from google.appengine.tools.devappserver2.endpoints import errors
+from google.appengine.tools.devappserver2.endpoints import parameter_converter
 from google.appengine.tools.devappserver2.endpoints import util
 
 
@@ -309,7 +310,7 @@
                                             spi_request.body,
                                             spi_request.source_ip)
     return self.handle_spi_response(orig_request, spi_request, response,
-                                    start_response)
+                                    method_config, start_response)
 
   class __CheckCorsHeaders(object):
     """Track information about CORS headers and our response to them."""
@@ -350,7 +351,7 @@
         headers[_CORS_HEADER_ALLOW_HEADERS] = self.cors_request_headers
 
   def handle_spi_response(self, orig_request, spi_request, response,
-                          start_response):
+                          method_config, start_response):
     """Handle SPI response, transforming output as needed.
 
     This calls start_response and returns the response body.
@@ -360,6 +361,7 @@
       spi_request: An ApiRequest, the transformed request that was sent to the
         SPI handler.
       response: A ResponseTuple, the response from the SPI handler.
+      method_config: A dict, the API config of the method to be called.
       start_response: A function with semantics defined in PEP-333.
 
     Returns:
@@ -381,6 +383,13 @@
     if orig_request.is_rpc():
       body = self.transform_jsonrpc_response(spi_request, response.content)
     else:
+      # Check if the response from the SPI was empty.  Empty REST responses
+      # generate an HTTP 204.
+      empty_response = self.check_empty_response(orig_request, method_config,
+                                                 start_response)
+      if empty_response is not None:
+        return empty_response
+
       body = self.transform_rest_response(response.content)
 
     cors_handler = EndpointsDispatcher.__CheckCorsHeaders(orig_request)
@@ -462,66 +471,6 @@
     request.path = method_config.get('rosyMethod', '')
     return request
 
-  def _check_enum(self, parameter_name, value, field_parameter):
-    """Checks if the parameter value is valid if an enum.
-
-    If the parameter is not an enum, does nothing. If it is, verifies that
-    its value is valid.
-
-    Args:
-      parameter_name: A string containing the name of the parameter, which is
-        either just a variable name or the name with the index appended. For
-        example 'var' or 'var[2]'.
-      value: A string or list of strings containing the value(s) to be used as
-        enum(s) for the parameter.
-      field_parameter: The dictionary containing information specific to the
-        field in question. This is retrieved from request.parameters in the
-        method config.
-
-    Raises:
-      EnumRejectionError: If the given value is not among the accepted
-        enum values in the field parameter.
-    """
-    if 'enum' not in field_parameter:
-      return
-
-    enum_values = [enum['backendValue']
-                   for enum in field_parameter['enum'].values()
-                   if 'backendValue' in enum]
-    if value not in enum_values:
-      raise errors.EnumRejectionError(parameter_name, value, enum_values)
-
-  def _check_parameter(self, parameter_name, value, field_parameter):
-    """Checks if the parameter value is valid against all parameter rules.
-
-    If the value is a list this will recursively call _check_parameter
-    on the values in the list. Otherwise, it checks all parameter rules for
-    the current value.
-
-    In the list case, '[index-of-value]' is appended to the parameter name for
-    error reporting purposes.
-
-    Currently only checks if value adheres to enum rule, but more checks may be
-    added.
-
-    Args:
-      parameter_name: A string containing the name of the parameter, which is
-        either just a variable name or the name with the index appended, in the
-        recursive case. For example 'var' or 'var[2]'.
-      value: A string or list of strings containing the value(s) to be used for
-        the parameter.
-      field_parameter: The dictionary containing information specific to the
-        field in question. This is retrieved from request.parameters in the
-        method config.
-    """
-    if isinstance(value, list):
-      for index, element in enumerate(value):
-        parameter_name_index = '%s[%d]' % (parameter_name, index)
-        self._check_parameter(parameter_name_index, element, field_parameter)
-      return
-
-    self._check_enum(parameter_name, value, field_parameter)
-
   def _add_message_field(self, field_name, value, params):
     """Converts a . delimitied field name to a message field in parameters.
 
@@ -639,10 +588,11 @@
 
       # Order is important here.  Parameter names are dot-delimited in
       # parameters instead of nested in dictionaries as a message field is, so
-      # we need to call _check_parameter on them before calling
+      # we need to call transform_parameter_value on them before calling
       # _add_message_field.
 
-      self._check_parameter(key, body_json[key], current_parameter)
+      body_json[key] = parameter_converter.transform_parameter_value(
+          key, body_json[key], current_parameter)
       # Remove the old key and try to convert to nested message value
       message_value = body_json.pop(key)
       self._add_message_field(key, message_value, body_json)
@@ -683,6 +633,28 @@
     if status_code >= 300:
       raise errors.BackendError(response)
 
+  def check_empty_response(self, orig_request, method_config, start_response):
+    """If the response from the SPI is empty, return a HTTP 204 No Content.
+
+    Args:
+      orig_request: An ApiRequest, the original request from the user.
+      method_config: A dict, the API config of the method to be called.
+      start_response: A function with semantics defined in PEP-333.
+
+    Returns:
+      If the SPI response was empty, this returns a string containing the
+      response body that should be returned to the user.  If the SPI response
+      wasn't empty, this returns None, indicating that we should not exit early
+      with a 204.
+    """
+    response_config = method_config.get('response', {}).get('body')
+    if response_config == 'empty':
+      # The response to this function should be empty.  We should return a 204.
+      # Note that it's possible that the SPI returned something, but we'll
+      # ignore it.  This matches the behavior in the Endpoints server.
+      cors_handler = EndpointsDispatcher.__CheckCorsHeaders(orig_request)
+      return util.send_wsgi_no_content_response(start_response, cors_handler)
+
   def transform_rest_response(self, response_body):
     """Translates an apiserving REST response so it's ready to return.
 
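
The empty-response short-circuit above is a small piece of WSGI plumbing. A
minimal self-contained sketch (the recording start_response is hypothetical;
only the response.body == 'empty' rule comes from the hunk above):

    def check_empty_response(method_config, start_response):
      # A response body of 'empty' in the method config means the method
      # returns VoidMessage, so the REST layer answers 204 No Content.
      if method_config.get('response', {}).get('body') == 'empty':
        start_response('204 No Content', [('Content-Length', '0')])
        return ''   # empty body to hand back to the user
      return None   # not declared empty: continue normal processing

    recorded = []
    body = check_empty_response({'response': {'body': 'empty'}},
                                lambda status, headers: recorded.append(status))
    assert body == '' and recorded == ['204 No Content']
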
diff --git a/google/appengine/tools/devappserver2/endpoints/endpoints_server_regtest.py b/google/appengine/tools/devappserver2/endpoints/endpoints_server_regtest.py
index 1b245b0..493b4dd 100644
--- a/google/appengine/tools/devappserver2/endpoints/endpoints_server_regtest.py
+++ b/google/appengine/tools/devappserver2/endpoints/endpoints_server_regtest.py
@@ -140,6 +140,22 @@
                          'var_sint64': '-554', 'var_uint64': '4321'}
     self.assertEqual(expected_response, response_json)
 
+  def test_empty_test(self):
+    """Test that an empty response that should have an object returns 200."""
+    status, content, headers = self.fetch_url(
+        'default', 'GET', '/_ah/api/test_service/v1/empty_test')
+    self.assertEqual(200, status)
+    self.assertEqual('2', headers['Content-Length'])
+    self.assertEqual('{}', content)
+
+  def test_empty_response(self):
+    """An empty response that should be empty should return 204."""
+    status, content, headers = self.fetch_url(
+        'default', 'GET', '/_ah/api/test_service/v1/empty_response')
+    self.assertEqual(204, status)
+    self.assertEqual('0', headers['Content-Length'])
+    self.assertEqual('', content)
+
   def test_discovery_config(self):
     """Test that the discovery configuration looks right."""
     status, content, headers = self.fetch_url(
diff --git a/google/appengine/tools/devappserver2/endpoints/endpoints_server_test.py b/google/appengine/tools/devappserver2/endpoints/endpoints_server_test.py
index e6e7a17..5c8c037 100644
--- a/google/appengine/tools/devappserver2/endpoints/endpoints_server_test.py
+++ b/google/appengine/tools/devappserver2/endpoints/endpoints_server_test.py
@@ -125,7 +125,7 @@
     self.mox.StubOutWithMock(self.server, 'handle_spi_response')
     self.server.handle_spi_response(
         mox.IsA(api_request.ApiRequest), mox.IsA(api_request.ApiRequest),
-        spi_response, self.start_response).AndReturn('Test')
+        spi_response, mox.IsA(dict), self.start_response).AndReturn('Test')
 
     # Run the test.
     self.mox.ReplayAll()
@@ -392,7 +392,7 @@
         200, [('Content-type', 'text/plain')],
         'This is an invalid response.')
     response = self.server.handle_spi_response(orig_request, spi_request,
-                                               spi_response,
+                                               spi_response, {},
                                                self.start_response)
     error_json = {'error': {'message':
                             'Non-JSON reply: This is an invalid response.'}}
@@ -445,7 +445,7 @@
           '200 OK', [('Content-type', 'application/json')], '{}')
 
     response = self.server.handle_spi_response(orig_request, spi_request,
-                                               server_response,
+                                               server_response, {},
                                                self.start_response)
 
     headers = dict(self.response_headers)
@@ -534,7 +534,7 @@
                                             '{"some": "response"}')
 
     response = self.server.handle_spi_response(orig_request, spi_request,
-                                               spi_response,
+                                               spi_response, {},
                                                self.start_response)
     response = ''.join(response)  # Merge response iterator into single body.
 
@@ -555,7 +555,7 @@
                                             '{"some": "response"}')
 
     response = self.server.handle_spi_response(orig_request, spi_request,
-                                               spi_response,
+                                               spi_response, {},
                                                self.start_response)
     response = ''.join(response)  # Merge response iterator into single body.
 
@@ -570,7 +570,7 @@
     body = json.dumps({'some': 'response'}, indent=1)
     spi_response = dispatcher.ResponseTuple('200 OK', [('a', 'b')], body)
     response = self.server.handle_spi_response(orig_request, spi_request,
-                                               spi_response,
+                                               spi_response, {},
                                                self.start_response)
     self.assert_http_match(response, '200 OK',
                            [('a', 'b'),
@@ -631,6 +631,27 @@
     # Specified content type not matched
     self.assertEqual(False, self.server.verify_response(response, 200, 'a'))
 
+  def test_check_empty_response(self):
+    """Test that check_empty_response returns 204 for an empty response."""
+    orig_request = test_utils.build_request('/_ah/api/test', '{}')
+    method_config = {'response': {'body': 'empty'}}
+    empty_response = self.server.check_empty_response(orig_request,
+                                                      method_config,
+                                                      self.start_response)
+    self.assert_http_match(empty_response, 204, [('Content-Length', '0')], '')
+
+  def test_check_non_empty_response(self):
+    """Test that check_empty_response returns None for a non-empty response."""
+    orig_request = test_utils.build_request('/_ah/api/test', '{}')
+    method_config = {'response': {'body': 'autoTemplate(backendResponse)'}}
+    empty_response = self.server.check_empty_response(orig_request,
+                                                      method_config,
+                                                      self.start_response)
+    self.assertIsNone(empty_response)
+    self.assertIsNone(self.response_status)
+    self.assertIsNone(self.response_headers)
+    self.assertIsNone(self.response_exc_info)
+
 
 class TransformRequestTests(unittest.TestCase):
   """Tests that only hit the request transformation functions."""
@@ -1015,5 +1036,53 @@
                                      body_object, expected,
                                      method_params=method_params)
 
+  # Other tests.
+
+  def test_type_conversions(self):
+    """Verify that type conversion matches prod."""
+    path_parameters = {'int32_val': '1', 'uint32_val': '2',
+                       'int64_val': '3', 'uint64_val': '4',
+                       'true_bool_val': 'true', 'false_bool_val': 'FALSE'}
+    query_parameters = {'float_val': ['5.25'], 'double_val': ['6.5']}
+    body_object = {'int_body_val': '7'}
+    expected = {'int32_val': 1,
+                'uint32_val': 2,
+                'int64_val': '3',
+                'uint64_val': '4',
+                'true_bool_val': True,
+                'false_bool_val': False,
+                'float_val': 5.25,
+                'double_val': 6.5,
+                'int_body_val': '7'}
+    method_params = {'int32_val': {'type': 'int32'},
+                     'uint32_val': {'type': 'uint32'},
+                     'int64_val': {'type': 'int64'},
+                     'uint64_val': {'type': 'uint64'},
+                     'true_bool_val': {'type': 'boolean'},
+                     'false_bool_val': {'type': 'boolean'},
+                     'float_val': {'type': 'float'},
+                     'double_val': {'type': 'double'},
+                     'int_body_val': {'type': 'int32'}}
+    self._try_transform_rest_request(path_parameters, query_parameters,
+                                     body_object, expected, method_params)
+
+  def test_invalid_conversions(self):
+    """Verify that invalid parameter values for basic types raise errors."""
+    for type_name in ('int32', 'uint32', 'boolean', 'float', 'double'):
+      param_name = '%s_val' % type_name
+      path_parameters = {param_name: 'invalid'}
+      query_parameters = {}
+      body_object = {}
+      expected = {}
+      method_params = {param_name: {'type': type_name}}
+
+      try:
+        self._try_transform_rest_request(path_parameters, query_parameters,
+                                         body_object, expected,
+                                         method_params=method_params)
+        self.fail('Bad %s value should have caused failure.' % type_name)
+      except errors.BasicTypeParameterError as error:
+        self.assertEqual(error.parameter_name, param_name)
+
 if __name__ == '__main__':
   unittest.main()
diff --git a/google/appengine/tools/devappserver2/endpoints/errors.py b/google/appengine/tools/devappserver2/endpoints/errors.py
index bcff206..678f194 100644
--- a/google/appengine/tools/devappserver2/endpoints/errors.py
+++ b/google/appengine/tools/devappserver2/endpoints/errors.py
@@ -26,11 +26,14 @@
 
 
 __all__ = ['BackendError',
+           'BasicTypeParameterError',
            'EnumRejectionError',
+           'InvalidParameterError',
            'RequestError',
            'RequestRejectionError']
 
 _INVALID_ENUM_TEMPLATE = 'Invalid string value: %r. Allowed values: %r'
+_INVALID_BASIC_PARAM_TEMPLATE = 'Invalid %s value: %r.'
 
 
 class RequestError(Exception):
@@ -143,27 +146,23 @@
     return 400
 
 
+class InvalidParameterError(RequestRejectionError):
+  """Base class for invalid parameter errors.
 
-class EnumRejectionError(RequestRejectionError):
-  """Custom request rejection exception for enum values."""
+  Child classes only need to implement the message() function.
+  """
 
-  def __init__(self, parameter_name, value, allowed_values):
-    """Constructor for EnumRejectionError.
+  def __init__(self, parameter_name, value):
+    """Constructor for InvalidParameterError.
 
     Args:
-      parameter_name: String; the name of the enum parameter which had a value
+      parameter_name: String; the name of the parameter which had a value
         rejected.
-      value: The actual value passed in for the enum. Usually string.
-      allowed_values: List of strings allowed for the enum.
+      value: The actual value passed in for the parameter. Usually string.
     """
-    super(EnumRejectionError, self).__init__()
+    super(InvalidParameterError, self).__init__()
     self.parameter_name = parameter_name
     self.value = value
-    self.allowed_values = allowed_values
-
-  def message(self):
-    """A descriptive message describing the error."""
-    return _INVALID_ENUM_TEMPLATE % (self.value, self.allowed_values)
 
   def reason(self):
     """Returns the server's reason for this error.
@@ -183,6 +182,47 @@
             'location': self.parameter_name}
 
 
+class BasicTypeParameterError(InvalidParameterError):
+  """Request rejection exception for basic types (int, float)."""
+
+  def __init__(self, parameter_name, value, type_name):
+    """Constructor for BasicTypeParameterError.
+
+    Args:
+      parameter_name: String; the name of the parameter which had a value
+        rejected.
+      value: The actual value passed in for the parameter. Usually string.
+      type_name: Descriptive name of the data type expected.
+    """
+    super(BasicTypeParameterError, self).__init__(parameter_name, value)
+    self.type_name = type_name
+
+  def message(self):
+    """A descriptive message describing the error."""
+    return _INVALID_BASIC_PARAM_TEMPLATE % (self.type_name, self.value)
+
+
+class EnumRejectionError(InvalidParameterError):
+  """Custom request rejection exception for enum values."""
+
+  def __init__(self, parameter_name, value, allowed_values):
+    """Constructor for EnumRejectionError.
+
+    Args:
+      parameter_name: String; the name of the enum parameter which had a value
+        rejected.
+      value: The actual value passed in for the enum. Usually string.
+      allowed_values: List of strings allowed for the enum.
+    """
+    super(EnumRejectionError, self).__init__(parameter_name, value)
+    self.allowed_values = allowed_values
+
+  def message(self):
+    """A descriptive message describing the error."""
+    return _INVALID_ENUM_TEMPLATE % (self.value, self.allowed_values)
+
+
 class BackendError(RequestError):
   """Exception raised when the backend SPI returns an error code."""
 
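
With this refactoring, both concrete exceptions share InvalidParameterError,
so callers can catch the base class and use parameter_name and message()
uniformly. A usage sketch (assuming the SDK's devappserver2 package is
importable; the try/except driver is hypothetical):

    from google.appengine.tools.devappserver2.endpoints import errors

    for exc in (errors.BasicTypeParameterError('count', 'abc', 'integer'),
                errors.EnumRejectionError('color', 'purple', ['red', 'blue'])):
      try:
        raise exc
      except errors.InvalidParameterError as e:
        # message() is subclass-specific; parameter_name comes from the base.
        print '%s: %s' % (e.parameter_name, e.message())
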
diff --git a/google/appengine/tools/devappserver2/endpoints/parameter_converter.py b/google/appengine/tools/devappserver2/endpoints/parameter_converter.py
new file mode 100644
index 0000000..0535002
--- /dev/null
+++ b/google/appengine/tools/devappserver2/endpoints/parameter_converter.py
@@ -0,0 +1,203 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Helper that converts parameter values to the type expected by the SPI.
+
+Parameter values that appear in the URL and the query string are usually
+converted to native types before being passed to the SPI.  This code handles
+that conversion and some validation.
+"""
+
+from google.appengine.tools.devappserver2.endpoints import errors
+
+
+__all__ = ['transform_parameter_value']
+
+
+def _check_enum(parameter_name, value, parameter_config):
+  """Checks if an enum value is valid.
+
+  This is called by the transform_parameter_value function and shouldn't be
+  called directly.
+
+  This verifies that the value of an enum parameter is valid.
+
+  Args:
+    parameter_name: A string containing the name of the parameter, which is
+      either just a variable name or the name with the index appended. For
+      example 'var' or 'var[2]'.
+    value: A string containing the value passed in for the parameter.
+    parameter_config: The dictionary containing information specific to the
+      parameter in question. This is retrieved from request.parameters in
+      the method config.
+
+  Raises:
+    EnumRejectionError: If the given value is not among the accepted
+      enum values in the parameter configuration.
+  """
+  enum_values = [enum['backendValue']
+                 for enum in parameter_config['enum'].values()
+                 if 'backendValue' in enum]
+  if value not in enum_values:
+    raise errors.EnumRejectionError(parameter_name, value, enum_values)
+
+
+def _check_boolean(parameter_name, value, parameter_config):
+  """Checks if a boolean value is valid.
+
+  This is called by the transform_parameter_value function and shouldn't be
+  called directly.
+
+  This checks that the string value passed in can be converted to a valid
+  boolean value.
+
+  Args:
+    parameter_name: A string containing the name of the parameter, which is
+      either just a variable name or the name with the index appended. For
+      example 'var' or 'var[2]'.
+    value: A string containing the value passed in for the parameter.
+    parameter_config: The dictionary containing information specific to the
+      parameter in question. This is retrieved from request.parameters in
+      the method config.
+
+  Raises:
+    BasicTypeParameterError: If the given value is not a valid boolean
+      value.
+  """
+  if parameter_config.get('type') != 'boolean':
+    return
+
+  if value.lower() not in ('1', 'true', '0', 'false'):
+    raise errors.BasicTypeParameterError(parameter_name, value, 'boolean')
+
+
+def _convert_boolean(value):
+  """Convert a string to a boolean value the same way the server does.
+
+  This is called by the transform_parameter_value function and shouldn't be
+  called directly.
+
+  Args:
+    value: A string value to be converted to a boolean.
+
+  Returns:
+    True or False, based on whether the value in the string would be interpreted
+    as true or false by the server.  In the case of an invalid entry, this
+    returns False.
+  """
+  if value.lower() in ('1', 'true'):
+    return True
+  return False
+
+
+# Map to convert parameters from strings to their desired back-end format.
+# Anything not listed here will remain a string.  Note that the server
+# keeps int64 and uint64 as strings when passed to the SPI.
+# This maps a type name from the .api method configuration to a (validation
+# function, conversion function, descriptive type name) tuple.  The
+# descriptive type name is only used in conversion error messages, and the
+# names here are chosen to match the error messages from the server.
+# Note that the 'enum' entry is special cased.  Enums have 'type': 'string',
+# so we have special case code to recognize them and use the 'enum' map
+# entry.
+_PARAM_CONVERSION_MAP = {'boolean': (_check_boolean,
+                                     _convert_boolean,
+                                     'boolean'),
+                         'int32': (None, int, 'integer'),
+                         'uint32': (None, int, 'integer'),
+                         'float': (None, float, 'float'),
+                         'double': (None, float, 'double'),
+                         'enum': (_check_enum, None, None)}
+
+
+def _get_parameter_conversion_entry(parameter_config):
+  """Get information needed to convert the given parameter to its SPI type.
+
+  Args:
+    parameter_config: The dictionary containing information specific to the
+      parameter in question. This is retrieved from request.parameters in the
+      method config.
+
+  Returns:
+    The entry from _PARAM_CONVERSION_MAP with functions/information needed to
+    validate and convert the given parameter from a string to the type expected
+    by the SPI.
+  """
+  entry = _PARAM_CONVERSION_MAP.get(parameter_config.get('type'))
+
+  # Special handling for enum parameters.  An enum's type is 'string', so we
+  # need to detect them by the presence of an 'enum' property in their
+  # configuration.
+  if entry is None and 'enum' in parameter_config:
+    entry = _PARAM_CONVERSION_MAP['enum']
+
+  return entry
+
+
+def transform_parameter_value(parameter_name, value, parameter_config):
+  """Validates and transforms parameters to the type expected by the SPI.
+
+  If the value is a list this will recursively call transform_parameter_value
+  on the values in the list. Otherwise, it checks all parameter rules for
+  the current value and converts its type from a string to whatever format
+  the SPI expects.
+
+  In the list case, '[index-of-value]' is appended to the parameter name for
+  error reporting purposes.
+
+  Args:
+    parameter_name: A string containing the name of the parameter, which is
+      either just a variable name or the name with the index appended, in the
+      recursive case. For example 'var' or 'var[2]'.
+    value: A string or list of strings containing the value(s) passed in for
+      the parameter.  These are the values from the request, to be validated,
+      transformed, and passed along to the SPI.
+    parameter_config: The dictionary containing information specific to the
+      parameter in question. This is retrieved from request.parameters in the
+      method config.
+
+  Returns:
+    The converted parameter value(s).  Not all types are converted, so this
+    may be the same string that's passed in.
+  """
+  if isinstance(value, list):
+    # We're only expecting to handle path and query string parameters here.
+    # The way path and query string parameters are passed in, they'll likely
+    # only be single values or singly-nested lists (no lists nested within
+    # lists).  But even if there are nested lists, we'd want to preserve that
+    # structure.  These recursive calls should preserve it and convert all
+    # parameter values.  See the docstring for information about the parameter
+    # renaming done here.
+    return [transform_parameter_value('%s[%d]' % (parameter_name, index),
+                                      element, parameter_config)
+            for index, element in enumerate(value)]
+
+  # Validate and convert the parameter value.
+  entry = _get_parameter_conversion_entry(parameter_config)
+  if entry:
+    validation_func, conversion_func, type_name = entry
+    if validation_func:
+      validation_func(parameter_name, value, parameter_config)
+    if conversion_func:
+      try:
+        return conversion_func(value)
+      except ValueError:
+        raise errors.BasicTypeParameterError(parameter_name, value, type_name)
+
+  return value
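
A usage sketch of the new entry point (the parameter configs below are
hypothetical, but shaped like the request.parameters entries the docstrings
describe):

    from google.appengine.tools.devappserver2.endpoints import parameter_converter

    # Scalars convert per _PARAM_CONVERSION_MAP; int64/uint64 stay strings.
    assert parameter_converter.transform_parameter_value(
        'n', '1', {'type': 'int32'}) == 1
    assert parameter_converter.transform_parameter_value(
        'n', '3', {'type': 'int64'}) == '3'
    assert parameter_converter.transform_parameter_value(
        'b', 'FALSE', {'type': 'boolean'}) is False

    # Lists recurse, renaming to 'vals[0]', 'vals[1]', ... for error reporting.
    assert parameter_converter.transform_parameter_value(
        'vals', ['5.25', '6.5'], {'type': 'float'}) == [5.25, 6.5]
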
diff --git a/google/appengine/tools/devappserver2/endpoints/testdata/test_service.py b/google/appengine/tools/devappserver2/endpoints/testdata/test_service.py
index f93f6ca..bb346b4 100644
--- a/google/appengine/tools/devappserver2/endpoints/testdata/test_service.py
+++ b/google/appengine/tools/devappserver2/endpoints/testdata/test_service.py
@@ -64,6 +64,12 @@
   def test(self, unused_request):
     return TestResponse(text='Test response')
 
+  @endpoints.method(message_types.VoidMessage, TestResponse,
+                    http_method='GET', name='empty_test', path='empty_test',
+                    scopes=[])
+  def empty_test(self, unused_request):
+    return TestResponse()
+
   @endpoints.method(TestRequest, TestResponse,
                     http_method='POST', name='t2name', path='t2path',
                     scopes=[])
@@ -99,6 +105,11 @@
         var_uint64=request.var_uint64 + 1)
     return response
 
+  @endpoints.method(message_types.VoidMessage, message_types.VoidMessage,
+                    path='empty_response', http_method='GET', scopes=[])
+  def empty_response(self, unused_request):
+    return message_types.VoidMessage()
+
 
 @my_api.api_class(resource_name='extraname', path='extrapath')
 class ExtraMethods(remote.Service):
diff --git a/google/appengine/tools/devappserver2/endpoints/util.py b/google/appengine/tools/devappserver2/endpoints/util.py
index 568773b..ceb644a 100644
--- a/google/appengine/tools/devappserver2/endpoints/util.py
+++ b/google/appengine/tools/devappserver2/endpoints/util.py
@@ -46,6 +46,10 @@
                             start_response, cors_handler=cors_handler)
 
 
+def send_wsgi_no_content_response(start_response, cors_handler=None):
+  return send_wsgi_response('204', [], '', start_response, cors_handler)
+
+
 def send_wsgi_response(status, headers, content, start_response,
                        cors_handler=None):
   """Dump reformatted response to CGI start_response.
diff --git a/google/appengine/tools/devappserver2/endpoints/util_test.py b/google/appengine/tools/devappserver2/endpoints/util_test.py
index 04cdc67..9a41cda 100644
--- a/google/appengine/tools/devappserver2/endpoints/util_test.py
+++ b/google/appengine/tools/devappserver2/endpoints/util_test.py
@@ -43,5 +43,9 @@
                            [('Location', 'http://www.google.com'),
                             ('Content-Length', '0')], '')
 
+  def test_send_wsgi_no_content_response(self):
+    response = util.send_wsgi_no_content_response(self.start_response)
+    self.assert_http_match(response, 204, [('Content-Length', '0')], '')
+
 if __name__ == '__main__':
   unittest.main()
diff --git a/google/appengine/tools/devappserver2/http_runtime.py b/google/appengine/tools/devappserver2/http_runtime.py
index 5455c53..fc27406 100644
--- a/google/appengine/tools/devappserver2/http_runtime.py
+++ b/google/appengine/tools/devappserver2/http_runtime.py
@@ -14,7 +14,24 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-"""Serves content for "script" handlers using an HTTP runtime."""
+"""Serves content for "script" handlers using an HTTP runtime.
+
+http_runtime supports two ways to start the runtime instance.
+
+START_PROCESS sends the runtime_config protobuf (serialized and base64 encoded
+as not all platforms support binary data over stdin) to the runtime instance
+over stdin and requires the runtime instance to send the port it is listening on
+over stdout.
+
+START_PROCESS_FILE creates two temporary files and adds the paths of both files
+to the runtime instance command line. The first file is written by http_runtime
+with the runtime_config proto (serialized); the runtime instance is expected to
+delete the file after reading it. The second file is written by the runtime
+instance with the port it is listening on (the line must be newline terminated);
+http_runtime is expected to delete the file after reading it.
+
+TODO: convert all runtimes to START_PROCESS_FILE.
+"""
 
 
 import base64
@@ -37,12 +54,73 @@
 from google.appengine.tools.devappserver2 import tee
 from google.appengine.tools.devappserver2 import util
 
+START_PROCESS = -1
+START_PROCESS_FILE = -2
+
+
+def _sleep_between_retries(attempt, max_attempts, sleep_base):
+  """Sleep between retry attempts.
+
+  Do an exponential backoff between retry attempts on an operation. The general
+  pattern for use is:
+    for attempt in range(max_attempts):
+      # Try operation, either return or break on success
+      _sleep_between_retries(attempt, max_attempts, sleep_base)
+
+  Args:
+    attempt: Which attempt just failed (0 based).
+    max_attempts: The maximum number of attempts that will be made.
+    sleep_base: How long in seconds to sleep between the first and second
+      attempt (the time will be doubled between each successive attempt). The
+      value may be any numeric type that is convertible to float (complex
+      won't work but user types that are sufficiently numeric-like will).
+  """
+  # Don't sleep after the last attempt as we're about to give up.
+  if attempt < (max_attempts - 1):
+    time.sleep((2 ** attempt) * sleep_base)
+
+
+def _remove_retry_sharing_violation(path, max_attempts=10, sleep_base=.125):
+  """Removes a file (with retries on Windows for sharing violations).
+
+  Args:
+    path: The filesystem path to remove.
+    max_attempts: The maximum number of attempts to try to remove the path
+      before giving up.
+    sleep_base: How long in seconds to sleep between the first and second
+      attempt (the time will be doubled between each successive attempt). The
+      value may be any numeric type that is convertible to float (complex
+      won't work but user types that are sufficiently numeric-like will).
+
+  Raises:
+    WindowsError: When an error other than a sharing violation occurs.
+  """
+  if sys.platform == 'win32':
+    for attempt in range(max_attempts):
+      try:
+        os.remove(path)
+        break
+      except WindowsError as e:
+        import winerror
+        # Sharing violations are expected to occasionally occur when the runtime
+        # instance is context swapped after writing the port but before closing
+        # the file. Ignore these and try again.
+        if e.winerror != winerror.ERROR_SHARING_VIOLATION:
+          raise
+      _sleep_between_retries(attempt, max_attempts, sleep_base)
+    else:
+      logging.warn('Unable to delete %s', path)
+  else:
+    os.remove(path)
+
 
 class HttpRuntimeProxy(instance.RuntimeProxy):
   """Manages a runtime subprocess used to handle dynamic content."""
 
+  _VALID_START_PROCESS_FLAVORS = [START_PROCESS, START_PROCESS_FILE]
+
   def __init__(self, args, runtime_config_getter, module_configuration,
-               env=None):
+               env=None, start_process_flavor=START_PROCESS):
     """Initializer for HttpRuntimeProxy.
 
     Args:
@@ -54,6 +132,12 @@
          instance representing the configuration of the module that owns the
           runtime.
       env: A dict of environment variables to pass to the runtime subprocess.
+      start_process_flavor: Which flavor of process startup to use for the
+        runtime process. Supported flavors are START_PROCESS and
+        START_PROCESS_FILE.
+
+    Raises:
+      ValueError: An unknown value for start_process_flavor was used.
     """
     super(HttpRuntimeProxy, self).__init__()
     self._host = 'localhost'
@@ -66,6 +150,9 @@
     self._args = args
     self._module_configuration = module_configuration
     self._env = env
+    if start_process_flavor not in self._VALID_START_PROCESS_FLAVORS:
+      raise ValueError('Invalid start_process_flavor.')
+    self._start_process_flavor = start_process_flavor
 
   def _get_error_file(self):
     for error_handler in self._module_configuration.error_handlers or []:
@@ -232,23 +319,79 @@
                     ('Content-Length', str(len(message)))])
     return message
 
+  def _read_start_process_file(self, max_attempts=10, sleep_base=.125):
+    """Read the single line response expected in the start process file.
+
+    The START_PROCESS_FILE flavor uses a file for the runtime instance to
+    report back the port it is listening on. We can't rely on EOF semantics,
+    as the runtime instance may still be writing the file while the
+    devappserver process is reading it; instead we rely on the line being
+    terminated with a newline.
+
+    Args:
+      max_attempts: The maximum number of attempts to read the line.
+      sleep_base: How long in seconds to sleep between the first and second
+        attempt (the time will be doubled between each successive attempt). The
+        value may be any numeric type that is convertible to float (complex
+        won't work but user types that are sufficiently numeric-like will).
+
+    Returns:
+      If a full line (terminated by a newline) is found, all data read up to
+      that point is returned; an empty string is returned if no newline is
+      read before the process exits or the maximum number of attempts has
+      been made.
+    """
+    try:
+      for attempt in range(max_attempts):
+        # Yes, the final data may already be in the file even though the
+        # process exited. But since the process is supposed to stay alive,
+        # if it has exited something is wrong and we don't care about any
+        # data it wrote anyway.
+        if self._process.poll() is not None:
+          return ''
+        # On Mac, if the first read in this process occurs before the data is
+        # written, no data will ever be read by this process without the seek.
+        self._process.child_out.seek(0)
+        line = self._process.child_out.read()
+        if '\n' in line:
+          return line
+        _sleep_between_retries(attempt, max_attempts, sleep_base)
+    finally:
+      self._process.child_out.close()
+    return ''
+
   def start(self):
     """Starts the runtime process and waits until it is ready to serve."""
     runtime_config = self._runtime_config_getter()
-    serialized_config = base64.b64encode(runtime_config.SerializeToString())
     # TODO: Use a different process group to isolate the child process
     # from signals sent to the parent. Only available in subprocess in
     # Python 2.7.
-    with self._process_lock:
-      assert not self._process, 'start() can only be called once'
-      self._process = safe_subprocess.start_process(
-          self._args,
-          serialized_config,
-          stdout=subprocess.PIPE,
-          stderr=subprocess.PIPE,
-          env=self._env,
-          cwd=self._module_configuration.application_root)
-    line = self._process.stdout.readline()
+    assert self._start_process_flavor in self._VALID_START_PROCESS_FLAVORS
+    if self._start_process_flavor == START_PROCESS:
+      serialized_config = base64.b64encode(runtime_config.SerializeToString())
+      with self._process_lock:
+        assert not self._process, 'start() can only be called once'
+        self._process = safe_subprocess.start_process(
+            self._args,
+            serialized_config,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            env=self._env,
+            cwd=self._module_configuration.application_root)
+      line = self._process.stdout.readline()
+    elif self._start_process_flavor == START_PROCESS_FILE:
+      serialized_config = runtime_config.SerializeToString()
+      with self._process_lock:
+        assert not self._process, 'start() can only be called once'
+        self._process = safe_subprocess.start_process_file(
+            args=self._args,
+            input_string=serialized_config,
+            env=self._env,
+            cwd=self._module_configuration.application_root,
+            stderr=subprocess.PIPE)
+      line = self._read_start_process_file()
+      _remove_retry_sharing_violation(self._process.child_out.name)
+
+    # _stderr_tee may be pre-set by unit tests.
     if self._stderr_tee is None:
       self._stderr_tee = tee.Tee(self._process.stderr, sys.stderr)
       self._stderr_tee.start()
@@ -283,4 +426,8 @@
         self._process.kill()
       except OSError:
         pass
+      # Mac leaks file descriptors without a call to join. Suspect a race
+      # condition where the interpreter is unable to close the subprocess pipe
+      # as the thread hasn't returned from the readline call.
+      self._stderr_tee.join(5)
       self._process = None
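
The backoff helpers above double the sleep on every retry and skip the sleep
after the final attempt. Spelled out (the schedule follows directly from
_sleep_between_retries with max_attempts=10 and sleep_base=.125):

    import time

    def sleep_between_retries(attempt, max_attempts, sleep_base):
      # Mirrors _sleep_between_retries above: exponential backoff, with no
      # sleep after the final attempt since the caller is about to give up.
      if attempt < max_attempts - 1:
        time.sleep((2 ** attempt) * sleep_base)

    # Successive sleeps for attempts 0..8: 0.125, 0.25, 0.5, 1, 2, 4, 8, 16
    # and 32 seconds; attempt 9 does not sleep. This is exactly the schedule
    # the test_runtime_instance_invalid_response expectations encode below.
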
diff --git a/google/appengine/tools/devappserver2/http_runtime_test.py b/google/appengine/tools/devappserver2/http_runtime_test.py
index f4d7dfb..64d5bdf 100644
--- a/google/appengine/tools/devappserver2/http_runtime_test.py
+++ b/google/appengine/tools/devappserver2/http_runtime_test.py
@@ -26,6 +26,7 @@
 import socket
 import subprocess
 import tempfile
+import time
 import unittest
 
 import google
@@ -86,6 +87,9 @@
   def get_buf(self):
     return self.buf
 
+  def join(self, unused_timeout):
+    pass
+
 
 class ModuleConfigurationStub(object):
   def __init__(self, application_root='/tmp', error_handlers=None):
@@ -617,5 +621,183 @@
     self.mox.VerifyAll()
 
 
+class HttpRuntimeProxyFileFlavorTest(wsgi_test_utils.WSGITestCase):
+  def setUp(self):
+    self.mox = mox.Mox()
+    self.tmpdir = tempfile.mkdtemp()
+    module_configuration = ModuleConfigurationStub(application_root=self.tmpdir)
+    self.runtime_config = runtime_config_pb2.Config()
+    self.runtime_config.app_id = 'app'
+    self.runtime_config.version_id = 'version'
+    self.runtime_config.api_port = 12345
+    self.runtime_config.application_root = self.tmpdir
+    self.runtime_config.datacenter = 'us1'
+    self.runtime_config.instance_id = 'abc3dzac4'
+    self.runtime_config.auth_domain = 'gmail.com'
+    self.runtime_config_getter = lambda: self.runtime_config
+    self.proxy = http_runtime.HttpRuntimeProxy(
+        ['/runtime'], self.runtime_config_getter, module_configuration,
+        env={'foo': 'bar'},
+        start_process_flavor=http_runtime.START_PROCESS_FILE)
+    self.proxy._port = 23456
+    self.mox.StubOutWithMock(self.proxy, '_process_lock')
+    self.process = self.mox.CreateMock(subprocess.Popen)
+    self.process.stdin = self.mox.CreateMockAnything()
+    self.process.stdout = self.mox.CreateMockAnything()
+    self.process.stderr = self.mox.CreateMockAnything()
+    self.process.child_out = self.mox.CreateMockAnything()
+    self.mox.StubOutWithMock(safe_subprocess, 'start_process_file')
+    self.mox.StubOutWithMock(httplib.HTTPConnection, 'connect')
+    self.mox.StubOutWithMock(httplib.HTTPConnection, 'request')
+    self.mox.StubOutWithMock(httplib.HTTPConnection, 'getresponse')
+    self.mox.StubOutWithMock(httplib.HTTPConnection, 'close')
+    self.mox.StubOutWithMock(os, 'remove')
+    self.mox.StubOutWithMock(time, 'sleep')
+    self.url_map = appinfo.URLMap(url=r'/(get|post).*',
+                                  script=r'\1.py')
+
+  def tearDown(self):
+    shutil.rmtree(self.tmpdir)
+    self.mox.UnsetStubs()
+
+  def test_basic(self):
+    """Basic functionality test of START_PROCESS_FILE flavor."""
+    # start()
+    # As the lock is mocked out, this provides a mox expectation.
+    with self.proxy._process_lock:
+      safe_subprocess.start_process_file(
+          args=['/runtime'],
+          input_string=self.runtime_config.SerializeToString(),
+          env={'foo': 'bar'},
+          cwd=self.tmpdir,
+          stderr=subprocess.PIPE).AndReturn(self.process)
+    self.process.poll().AndReturn(None)
+    self.process.child_out.seek(0).AndReturn(None)
+    self.process.child_out.read().AndReturn('1234\n')
+    self.process.child_out.close().AndReturn(None)
+    self.process.child_out.name = '/tmp/c-out.ABC'
+    os.remove('/tmp/c-out.ABC').AndReturn(None)
+    self.proxy._stderr_tee = FakeTee('')
+
+    # _can_connect() via start().
+    httplib.HTTPConnection.connect()
+    httplib.HTTPConnection.close()
+
+    self.mox.ReplayAll()
+    self.proxy.start()
+    self.assertEquals(1234, self.proxy._port)
+    self.mox.VerifyAll()
+
+  def test_slow_shattered(self):
+    """The port number is received slowly in chunks."""
+    # start()
+    # As the lock is mocked out, this provides a mox expectation.
+    with self.proxy._process_lock:
+      safe_subprocess.start_process_file(
+          args=['/runtime'],
+          input_string=self.runtime_config.SerializeToString(),
+          env={'foo': 'bar'},
+          cwd=self.tmpdir,
+          stderr=subprocess.PIPE).AndReturn(self.process)
+    for response, sleeptime in [
+        ('', .125), ('43', .25), ('4321', .5), ('4321\n', None)]:
+      self.process.poll().AndReturn(None)
+      self.process.child_out.seek(0).AndReturn(None)
+      self.process.child_out.read().AndReturn(response)
+      if sleeptime is not None:
+        time.sleep(sleeptime).AndReturn(None)
+    self.process.child_out.close().AndReturn(None)
+    self.process.child_out.name = '/tmp/c-out.ABC'
+    os.remove('/tmp/c-out.ABC').AndReturn(None)
+    self.proxy._stderr_tee = FakeTee('')
+
+    # _can_connect() via start().
+    httplib.HTTPConnection.connect()
+    httplib.HTTPConnection.close()
+
+    self.mox.ReplayAll()
+    self.proxy.start()
+    self.assertEquals(4321, self.proxy._port)
+    self.mox.VerifyAll()
+
+  def test_runtime_instance_dies_immediately(self):
+    """Runtime instance dies without sending a port."""
+    # start()
+    # As the lock is mocked out, this provides a mox expectation.
+    with self.proxy._process_lock:
+      safe_subprocess.start_process_file(
+          args=['/runtime'],
+          input_string=self.runtime_config.SerializeToString(),
+          env={'foo': 'bar'},
+          cwd=self.tmpdir,
+          stderr=subprocess.PIPE).AndReturn(self.process)
+    self.process.poll().AndReturn(1)
+    self.process.child_out.close().AndReturn(None)
+    self.process.child_out.name = '/tmp/c-out.ABC'
+    os.remove('/tmp/c-out.ABC').AndReturn(None)
+    header = "bad runtime process port ['']\n\n"
+    stderr0 = 'Go away..\n'
+    self.proxy._stderr_tee = FakeTee(stderr0)
+    time.sleep(.1).AndReturn(None)
+
+    self.mox.ReplayAll()
+    self.proxy.start()
+    expected_headers = {
+        'Content-Type': 'text/plain',
+        'Content-Length': str(len(header) + len(stderr0)),
+    }
+    self.assertResponse('500 Internal Server Error', expected_headers,
+                        header + stderr0,
+                        self.proxy.handle, {},
+                        url_map=self.url_map,
+                        match=re.match(self.url_map.url, '/get%20request'),
+                        request_id='request id',
+                        request_type=instance.NORMAL_REQUEST)
+    self.mox.VerifyAll()
+
+  def test_runtime_instance_invalid_response(self):
+    """Runtime instance does not terminate port with a newline."""
+    # start()
+    # As the lock is mocked out, this provides a mox expectation.
+    with self.proxy._process_lock:
+      safe_subprocess.start_process_file(
+          args=['/runtime'],
+          input_string=self.runtime_config.SerializeToString(),
+          env={'foo': 'bar'},
+          cwd=self.tmpdir,
+          stderr=subprocess.PIPE).AndReturn(self.process)
+    for response, sleeptime in [
+        ('30000', .125), ('30000', .25), ('30000', .5), ('30000', 1.0),
+        ('30000', 2.0), ('30000', 4.0), ('30000', 8.0), ('30000', 16.0),
+        ('30000', 32.0), ('30000', None)]:
+      self.process.poll().AndReturn(None)
+      self.process.child_out.seek(0).AndReturn(None)
+      self.process.child_out.read().AndReturn(response)
+      if sleeptime is not None:
+        time.sleep(sleeptime).AndReturn(None)
+    self.process.child_out.close().AndReturn(None)
+    self.process.child_out.name = '/tmp/c-out.ABC'
+    os.remove('/tmp/c-out.ABC').AndReturn(None)
+    header = "bad runtime process port ['']\n\n"
+    stderr0 = 'Go away..\n'
+    self.proxy._stderr_tee = FakeTee(stderr0)
+    time.sleep(.1)
+
+    self.mox.ReplayAll()
+    self.proxy.start()
+    expected_headers = {
+        'Content-Type': 'text/plain',
+        'Content-Length': str(len(header) + len(stderr0)),
+    }
+    self.assertResponse('500 Internal Server Error', expected_headers,
+                        header + stderr0,
+                        self.proxy.handle, {},
+                        url_map=self.url_map,
+                        match=re.match(self.url_map.url, '/get%20request'),
+                        request_id='request id',
+                        request_type=instance.NORMAL_REQUEST)
+    self.mox.VerifyAll()
+
+
 if __name__ == '__main__':
   unittest.main()
diff --git a/google/appengine/tools/devappserver2/module.py b/google/appengine/tools/devappserver2/module.py
index 401fc75..90bf85f 100644
--- a/google/appengine/tools/devappserver2/module.py
+++ b/google/appengine/tools/devappserver2/module.py
@@ -406,15 +406,16 @@
     self._python_config = python_config
     self._cloud_sql_config = cloud_sql_config
     self._request_data = request_data
-    # _create_instance_factory() transitively calls _get_runtime_config, which
-    # uses self._allow_skipped_files.
     self._allow_skipped_files = allow_skipped_files
-    self._instance_factory = self._create_instance_factory(
-        self._module_configuration)
     self._dispatcher = dispatcher
     self._max_instances = max_instances
     self._automatic_restarts = automatic_restarts
     self._use_mtime_file_watcher = use_mtime_file_watcher
+    self._default_version_port = default_version_port
+    self._port_registry = port_registry
+
+    self._instance_factory = self._create_instance_factory(
+        self._module_configuration)
     if self._automatic_restarts:
       self._watcher = file_watcher.get_file_watcher(
           [self._module_configuration.application_root] +
@@ -424,9 +425,6 @@
       self._watcher = None
     self._handler_lock = threading.Lock()
     self._handlers = self._create_url_handlers()
-    self._default_version_port = default_version_port
-    self._port_registry = port_registry
-
     self._balanced_module = wsgi_server.WsgiServer(
         (self._host, self._balanced_port), self)
     self._quit_event = threading.Event()  # Set when quit() has been called.
diff --git a/google/appengine/tools/devappserver2/php/runtime.py b/google/appengine/tools/devappserver2/php/runtime.py
index b479c05..8a5e834 100644
--- a/google/appengine/tools/devappserver2/php/runtime.py
+++ b/google/appengine/tools/devappserver2/php/runtime.py
@@ -97,11 +97,17 @@
     user_environ['REMOTE_REQUEST_ID'] = environ[
         http_runtime_constants.REQUEST_ID_ENVIRON]
 
+    # Pass the APPLICATION_ROOT so we can use it in the setup script. We will
+    # remove it from the environment before we execute the user script.
+    user_environ['APPLICATION_ROOT'] = self.config.application_root
+
     if 'CONTENT_TYPE' in environ:
       user_environ['CONTENT_TYPE'] = environ['CONTENT_TYPE']
+      user_environ['HTTP_CONTENT_TYPE'] = environ['CONTENT_TYPE']
 
     if 'CONTENT_LENGTH' in environ:
       user_environ['CONTENT_LENGTH'] = environ['CONTENT_LENGTH']
+      user_environ['HTTP_CONTENT_LENGTH'] = environ['CONTENT_LENGTH']
       content = environ['wsgi.input'].read(int(environ['CONTENT_LENGTH']))
     else:
       content = None
@@ -125,19 +131,27 @@
       args.extend(['-d', 'xdebug.remote_enable="1"'])
       user_environ['XDEBUG_CONFIG'] = os.environ.get('XDEBUG_CONFIG', '')
 
-    p = subprocess.Popen(args,
-                         stdin=subprocess.PIPE,
-                         stdout=subprocess.PIPE,
-                         stderr=subprocess.PIPE,
-                         env=user_environ,
-                         cwd=self.config.application_root)
-    stdout, stderr = p.communicate(content)
-
-    if p.returncode:
-      logging.error('php failure (%r) with:\n%s', p.returncode, stdout+stderr)
+    try:
+      p = subprocess.Popen(args,
+                           stdin=subprocess.PIPE,
+                           stdout=subprocess.PIPE,
+                           stderr=subprocess.PIPE,
+                           env=user_environ,
+                           cwd=self.config.application_root)
+      stdout, stderr = p.communicate(content)
+    except Exception as e:
+      logging.exception('Failure to start PHP with: %s', args)
       start_response('500 Internal Server Error',
                      [(http_runtime_constants.ERROR_CODE_HEADER, '1')])
-      return []
+      return ['Failure to start the PHP subprocess with %r:\n%s' % (args, e)]
+
+    if p.returncode:
+      logging.error('php failure (%r) with:\nstdout:\n%sstderr:\n%s',
+                    p.returncode, stdout, stderr)
+      start_response('500 Internal Server Error',
+                     [(http_runtime_constants.ERROR_CODE_HEADER, '1')])
+      return ['php failure (%r) with:\nstdout:\n%s\nstderr:\n%s' %
+              (p.returncode, stdout, stderr)]
 
     message = httplib.HTTPMessage(cStringIO.StringIO(stdout))
     assert 'Content-Type' in message, 'invalid CGI response: %r' % stdout
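
The hardened PHP launch reduces to a reusable pattern: report a failure to
start the interpreter, and surface stdout/stderr on a non-zero exit. A
self-contained sketch (run_cgi and its arguments are hypothetical stand-ins
for the values above):

    import logging
    import subprocess

    def run_cgi(args, content, env, cwd):
      try:
        p = subprocess.Popen(args, stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             env=env, cwd=cwd)
        stdout, stderr = p.communicate(content)
      except Exception as e:
        # Bad binary path, permissions, etc.: return a readable error body
        # instead of letting the exception escape the request handler.
        logging.exception('Failed to start subprocess with: %s', args)
        return None, 'failed to start %r:\n%s' % (args, e)
      if p.returncode:
        # Non-zero exit: include both streams so the error page is debuggable.
        return None, 'failure (%r):\nstdout:\n%s\nstderr:\n%s' % (
            p.returncode, stdout, stderr)
      return stdout, None
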
diff --git a/google/appengine/tools/devappserver2/php/setup.php b/google/appengine/tools/devappserver2/php/setup.php
index 2a8a183..973c95e 100644
--- a/google/appengine/tools/devappserver2/php/setup.php
+++ b/google/appengine/tools/devappserver2/php/setup.php
@@ -1,14 +1,34 @@
 <?php
 
-
-
+function _gae_syslog($priority, $format_string, $message) {
+  // TODO(bquinlan): Use the logs service to persist this message.
+}
 
 $setup = function() {
-  $allowed_buckets = ini_get(
-      'google_app_engine.allow_include_gs_buckets');
-  define('GAE_INCLUDE_REQUIRE_GS_STREAMS',
-         // All values are considered true except the empty string.
-         $allowed_buckets ? 1 : 0);
+  $setupGaeExtension = function() {
+    $allowed_buckets = '';
+    $ini_file = getenv('APPLICATION_ROOT') . DIRECTORY_SEPARATOR . 'php.ini';
+    $config_values = @parse_ini_file($ini_file);
+    if ($config_values &&
+        array_key_exists('google_app_engine.allow_include_gs_buckets',
+                         $config_values)) {
+      $allowed_buckets =
+          $config_values['google_app_engine.allow_include_gs_buckets'];
+    }
+    define('GAE_INCLUDE_REQUIRE_GS_STREAMS',
+           // All values are considered true except the empty string.
+           $allowed_buckets ? 1 : 0);
+    define('GAE_INCLUDE_GS_BUCKETS', $allowed_buckets);
+
+    unset($_ENV['APPLICATION_ROOT']);
+    unset($_SERVER['APPLICATION_ROOT']);
+  };
+
+  $configureDefaults = function() {
+    if (!ini_get('date.timezone')) {
+      date_default_timezone_set('UTC');
+    }
+  };
 
   $updateScriptFilename = function() {
     putenv('SCRIPT_FILENAME=' . getenv('REAL_SCRIPT_FILENAME'));
@@ -44,6 +64,8 @@
   $setupBuiltins = function() {
     require_once 'google/appengine/runtime/Setup.php';
   };
+  $setupGaeExtension();
+  $configureDefaults();
   $updateScriptFilename();
   $setupApiProxy();
   $setupBuiltins();
diff --git a/google/appengine/tools/devappserver2/python/pdb_sandbox.py b/google/appengine/tools/devappserver2/python/pdb_sandbox.py
new file mode 100644
index 0000000..c911b96
--- /dev/null
+++ b/google/appengine/tools/devappserver2/python/pdb_sandbox.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Modify pdb to work with the devappserver2 sandbox."""
+
+import sys
+
+
+def install():
+  """Install the necessary changes to pdb.
+
+  Monkeypatch pdb so that it can be used in the devappserver sandbox. Must
+  be called after the sandbox has been installed but before stdin/stdout
+  objects have been reassigned.
+  """
+  # Import here (i.e. after sandbox installed) to get the post sandbox pdb.
+  # Extremely important so that we monkeypatch the same pdb the apps can
+  # import.
+  import pdb as pdb_postsandbox
+
+  # Save stdin/stdout as the references will not be available when user
+  # code runs.
+  real_stdin = sys.stdin
+  real_stdout = sys.stdout
+
+  # Capture the original Pdb so we can forward the __init__ call after
+  # monkeypatching (if not captured, forwarding the call results in infinite
+  # recursion).
+  pdb_premonkeypatch = pdb_postsandbox.Pdb
+
+  class _Pdb(pdb_postsandbox.Pdb):
+    # TODO: improve argument handling so if new arguments are added
+    # in the future or the defaults change, this does not need to be updated.
+    def __init__(self, completekey='tab', stdin=None, stdout=None, skip=None):
+      if stdin is None:
+        stdin = real_stdin
+      if stdout is None:
+        stdout = real_stdout
+      # Pdb is an old-style class, so no super().
+      pdb_premonkeypatch.__init__(self, completekey, stdin, stdout, skip)
+
+  pdb_postsandbox.Pdb = _Pdb
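
The ordering contract in the docstring matters: install() must capture the
real console streams before the sandbox redirects them. A sketch of the
intended call sequence (mirroring the sandbox.py hunk below):

    # After the sandbox is installed, but before sys.stdin/sys.stdout are
    # reassigned (sandbox.py points stdin at devnull and stdout at stderr):
    pdb_sandbox.install()

    # Later, in user code, pdb instances default to the saved real streams,
    # so a breakpoint gets an interactive prompt on the developer's console:
    import pdb
    pdb.set_trace()
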
diff --git a/google/appengine/tools/devappserver2/python/request_handler.py b/google/appengine/tools/devappserver2/python/request_handler.py
index 8b52d01..275aa0b 100644
--- a/google/appengine/tools/devappserver2/python/request_handler.py
+++ b/google/appengine/tools/devappserver2/python/request_handler.py
@@ -33,6 +33,7 @@
 
 from google.appengine.api import api_base_pb
 from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import appinfo
 from google.appengine.api.logservice import log_service_pb
 from google.appengine.api.logservice import logservice
 from google.appengine.ext.remote_api import remote_api_stub
@@ -51,9 +52,16 @@
 
   def __init__(self, config):
     self.config = config
+    if appinfo.MODULE_SEPARATOR not in config.version_id:
+      module_id = appinfo.DEFAULT_MODULE
+      version_id = config.version_id
+    else:
+      module_id, version_id = config.version_id.split(appinfo.MODULE_SEPARATOR)
+
     self.environ_template = {
         'APPLICATION_ID': config.app_id,
-        'CURRENT_VERSION_ID': config.version_id,
+        'CURRENT_MODULE_ID': module_id,
+        'CURRENT_VERSION_ID': version_id,
         'DATACENTER': config.datacenter.encode('ascii'),
         'INSTANCE_ID': config.instance_id.encode('ascii'),
         'APPENGINE_RUNTIME': 'python27',
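
The module/version split above is small enough to state on its own. A sketch,
assuming appinfo.MODULE_SEPARATOR is ':' and appinfo.DEFAULT_MODULE is
'default' (both assumptions; the hunk only references the constants). The
result feeds the CURRENT_MODULE_ID and CURRENT_VERSION_ID environ values:

    def split_version_id(version_id, separator=':', default_module='default'):
      # 'v1.123'         -> ('default', 'v1.123')
      # 'backend:v1.123' -> ('backend', 'v1.123')
      if separator not in version_id:
        return default_module, version_id
      module_id, version_id = version_id.split(separator)
      return module_id, version_id

    assert split_version_id('v1.123') == ('default', 'v1.123')
    assert split_version_id('backend:v1.123') == ('backend', 'v1.123')
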
diff --git a/google/appengine/tools/devappserver2/python/runtime.py b/google/appengine/tools/devappserver2/python/runtime.py
index 20f9ce1..bde0e02 100644
--- a/google/appengine/tools/devappserver2/python/runtime.py
+++ b/google/appengine/tools/devappserver2/python/runtime.py
@@ -17,7 +17,6 @@
 """A Python devappserver2 runtime."""
 
 
-import base64
 import os
 import sys
 import time
@@ -110,9 +109,30 @@
         traceback=self._formatted_traceback)
 
 
+class AutoFlush(object):
+  def __init__(self, stream):
+    self.stream = stream
+
+  def write(self, data):
+    self.stream.write(data)
+    self.stream.flush()
+
+  def __getattr__(self, attr):
+    return getattr(self.stream, attr)
+
+
 def main():
+  # Required so PDB prompts work properly. Originally tried to disable
+  # buffering, both by adding the -u flag when starting this process and by
+  # adding "stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)", but neither
+  # worked.
+  sys.stdout = AutoFlush(sys.stdout)
+  assert len(sys.argv) == 3
+  child_in_path = sys.argv[1]
+  child_out_path = sys.argv[2]
   config = runtime_config_pb2.Config()
-  config.ParseFromString(base64.b64decode(sys.stdin.read()))
+  config.ParseFromString(open(child_in_path, 'rb').read())
+  os.remove(child_in_path)
+  child_out = open(child_out_path, 'wb')
   debugging_app = None
   if config.python_config and config.python_config.startup_script:
     global_vars = {'config': config}
@@ -139,9 +159,8 @@
         request_rewriter.runtime_rewriter_middleware(
             request_handler.RequestHandler(config)))
   server.start()
-  print server.port
-  sys.stdout.close()
-  sys.stdout = sys.stderr
+  print >>child_out, server.port
+  child_out.close()
   try:
     while True:
       time.sleep(1)
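
AutoFlush forwards every attribute except write(), which flushes after each
write. A quick demonstration with StringIO standing in for stdout (the demo
is hypothetical; the class mirrors the hunk above):

    import StringIO

    class AutoFlush(object):  # as in python/runtime.py above
      def __init__(self, stream):
        self.stream = stream

      def write(self, data):
        self.stream.write(data)
        self.stream.flush()

      def __getattr__(self, attr):
        return getattr(self.stream, attr)

    out = AutoFlush(StringIO.StringIO())
    out.write('pdb> ')                # flushed immediately after each write
    assert out.getvalue() == 'pdb> '  # getvalue() reaches the wrapped stream
                                      # via __getattr__
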
diff --git a/google/appengine/tools/devappserver2/python/sandbox.py b/google/appengine/tools/devappserver2/python/sandbox.py
index 7aaef34..8fad0cf 100644
--- a/google/appengine/tools/devappserver2/python/sandbox.py
+++ b/google/appengine/tools/devappserver2/python/sandbox.py
@@ -35,6 +35,7 @@
 from google.appengine import dist27 as dist27
 from google.appengine.ext.remote_api import remote_api_stub
 from google.appengine.runtime import request_environment
+from google.appengine.tools.devappserver2.python import pdb_sandbox
 from google.appengine.tools.devappserver2.python import request_state
 from google.appengine.tools.devappserver2.python import stubs
 
@@ -49,6 +50,7 @@
     'pycrypto': re.compile(r'Crypto(\..*)?$'),
     'lxml': re.compile(r'lxml(\..*)?$'),
     'PIL': re.compile(r'(PIL(\..*)?|_imaging|_imagingft|_imagingmath)$'),
+    'ssl': re.compile(r'_ssl$'),
 }
 
 # Maps App Engine third-party library names to the Python package name for
@@ -111,6 +113,7 @@
     config: The runtime_config_pb2.Config to use to configure the sandbox.
   """
 
+  devnull = open(os.path.devnull)
   modules = [os, traceback, google, protorpc]
   c_module = _find_shared_object_c_module()
   if c_module:
@@ -170,6 +173,9 @@
   request_environment.PatchOsEnviron(sandboxed_os)
   os.__dict__.update(sandboxed_os.__dict__)
   _init_logging(config.stderr_log_level)
+  pdb_sandbox.install()
+  sys.stdin = devnull
+  sys.stdout = sys.stderr
 
 
 def _find_shared_object_c_module():
diff --git a/google/appengine/tools/devappserver2/python_runtime.py b/google/appengine/tools/devappserver2/python_runtime.py
index 1e45b82..7062af2 100644
--- a/google/appengine/tools/devappserver2/python_runtime.py
+++ b/google/appengine/tools/devappserver2/python_runtime.py
@@ -87,7 +87,8 @@
         _RUNTIME_ARGS,
         instance_config_getter,
         self._module_configuration,
-        env=dict(os.environ, PYTHONHASHSEED='random'))
+        env=dict(os.environ, PYTHONHASHSEED='random'),
+        start_process_flavor=http_runtime.START_PROCESS_FILE)
     return instance.Instance(self.request_data,
                              instance_id,
                              proxy,
diff --git a/google/appengine/tools/devappserver2/safe_subprocess.py b/google/appengine/tools/devappserver2/safe_subprocess.py
index d267272..c35762e 100644
--- a/google/appengine/tools/devappserver2/safe_subprocess.py
+++ b/google/appengine/tools/devappserver2/safe_subprocess.py
@@ -19,6 +19,7 @@
 import logging
 import subprocess
 import sys
+import tempfile
 import threading
 
 # Subprocess creation is not threadsafe in Python. See
@@ -68,3 +69,56 @@
     p.stdin.close()
     p.stdin = None
   return p
+
+
+def start_process_file(args, input_string, env, cwd, stdin=None, stdout=None,
+                       stderr=None):
+  """Starts a subprocess thread safely with temporary files for communication.
+
+  An alternate version of start_process that allows for the preservation
+  of stdin and stdout by creating two files that can be used for communication
+  between the processes. The paths to these files are added to the command
+  line after any args provided by the caller. The first file is written with
+  the value of input_string and the second file is returned to the caller.
+
+  Args:
+    args: A string or sequence of strings containing the program arguments.
+    input_string: A string to pass to stdin of the subprocess.
+    env: A dict containing environment variables for the subprocess.
+    cwd: A string containing the directory to switch to before executing the
+        subprocess.
+    stdin: A file descriptor, file object or subprocess.PIPE to use for the
+        stdin descriptor for the subprocess.
+    stdout: A file descriptor, file object or subprocess.PIPE to use for the
+        stdout descriptor for the subprocess.
+    stderr: A file descriptor, file object or subprocess.PIPE to use for the
+        stderr descriptor for the subprocess.
+
+  Returns:
+    A subprocess.Popen instance for the created subprocess. In addition to
+    the standard attributes, an additional child_out attribute is attached
+    that references a NamedTemporaryFile that the child process may write
+    and this process may read; it is up to the caller to delete the file
+    (path available as p.child_out.name).
+  """
+  # In addition to needing to control deletion time, we need delete=False
+  # so that the child process can reopen the files by name, which Windows
+  # does not allow while a delete-on-close handle is held.
+  child_in = tempfile.NamedTemporaryFile(mode='wb', delete=False)
+  child_out = tempfile.NamedTemporaryFile(mode='rb', delete=False)
+
+  child_in.write(input_string)
+  child_in.close()
+
+  # pylint: disable=g-no-augmented-assignment
+  # += would mutate the caller's args list, which we don't want.
+  args = args + [child_in.name, child_out.name]
+
+  with _popen_lock:
+    logging.debug('Starting process %r with input=%r, env=%r, cwd=%r',
+                  args, input_string, env, cwd)
+    p = subprocess.Popen(args, env=env, cwd=cwd, stdin=stdin, stdout=stdout,
+                         stderr=stderr)
+
+  p.child_out = child_out
+  return p
+
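A caller-side sketch of the contract documented above: the two temporary-file paths are appended to the child's argv, the first already holds input_string, and the child is expected to write its reply to the second. The child script name here is hypothetical; only start_process_file itself comes from the SDK.

    import os
    import sys

    from google.appengine.tools.devappserver2 import safe_subprocess

    # Hypothetical child: reads sys.argv[-2], writes its reply to sys.argv[-1].
    p = safe_subprocess.start_process_file(
        args=[sys.executable, 'child.py'],
        input_string='serialized runtime config',
        env=os.environ.copy(),
        cwd='.')
    p.wait()
    reply = p.child_out.read()
    p.child_out.close()
    os.remove(p.child_out.name)  # deletion is the caller's responsibility
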
diff --git a/google/appengine/tools/endpointscfg.py b/google/appengine/tools/endpointscfg.py
index 8bd58f4..d67da86 100644
--- a/google/appengine/tools/endpointscfg.py
+++ b/google/appengine/tools/endpointscfg.py
@@ -28,7 +28,7 @@
 service to generate a client library for a target language (currently just Java)
 
 Example:
-  endpointscfg.py gen_client_lib java -o . greetings-v0.1-rest.api
+  endpointscfg.py gen_client_lib java -o . greetings-v0.1.discovery
 
 The get_client_lib subcommand does both of the above commands at once.
 
@@ -45,6 +45,7 @@
 from __future__ import with_statement
 
 
+import collections
 import contextlib
 
 try:
@@ -132,8 +133,11 @@
     A map from service names to a string containing the API configuration of the
       service in JSON format.
   """
-  service_map = {}
-  generator = generator or api_config.ApiConfigGenerator()
+  api_service_map = collections.OrderedDict()
   for service_class_name in service_class_names:
     module_name, base_service_class_name = service_class_name.rsplit('.', 1)
     module = __import__(module_name, fromlist=base_service_class_name)
@@ -141,10 +145,21 @@
     if not (isinstance(service, type) and issubclass(service, remote.Service)):
       raise TypeError('%s is not a ProtoRPC service' % service_class_name)
 
+    services = api_service_map.setdefault((service.api_info.name,
+                                           service.api_info.version),
+                                          [])
+    services.append(service)
 
-    hostname = service.api_info.hostname or hostname
-    service_map[service_class_name] = generator.pretty_print_config_to_json(
-        service, hostname=hostname)
+  service_map = collections.OrderedDict()
+  generator = generator or api_config.ApiConfigGenerator()
+  for api_info, services in api_service_map.iteritems():
+    hostname = services[0].api_info.hostname or hostname
+    service_map['%s-%s' % api_info] = generator.pretty_print_config_to_json(
+        services, hostname=hostname)
 
   return service_map
 
@@ -165,11 +180,11 @@
     ServerRequestException: If fetching the generated discovery doc fails.
 
   Returns:
-    A mapping from service names to discovery docs.
+    A list of discovery doc filenames.
   """
   output_files = []
   service_configs = GenApiConfig(service_class_names, hostname=hostname)
-  for service_class_name, config in service_configs.iteritems():
+  for api_name_version, config in service_configs.iteritems():
     body = json.dumps({'config': config}, indent=2, sort_keys=True)
     request = urllib2.Request(DISCOVERY_DOC_BASE + doc_format, body)
     request.add_header('content-type', 'application/json')
@@ -177,8 +192,7 @@
     try:
       with contextlib.closing(urllib2.urlopen(request)) as response:
         content = response.read()
-        _, base_service_class_name = service_class_name.rsplit('.', 1)
-        discovery_name = base_service_class_name + '.discovery'
+        discovery_name = api_name_version + '.discovery'
         output_files.append(_WriteFile(output_path, discovery_name, content))
     except urllib2.HTTPError, error:
       raise ServerRequestException(error)
@@ -279,9 +293,8 @@
       args.service, args.output, args.hostname)
   service_configs = api_func(service_class_names, hostname=hostname)
 
-  for service_class_name, config in service_configs.iteritems():
-    _, base_service_class_name = service_class_name.rsplit('.', 1)
-    api_name = base_service_class_name + '.api'
+  for api_name_version, config in service_configs.iteritems():
+    api_name = api_name_version + '.api'
     _WriteFile(output_path, api_name, config)
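
The net effect of the endpointscfg.py changes: services are first grouped by their (API name, version) pair, one combined config is generated per API, and output files are keyed and named 'name-version' (e.g. greetings-v1.discovery) instead of after each service class. A self-contained sketch of just the grouping step, with hypothetical tuples standing in for ProtoRPC service classes:

    import collections

    # (api name, api version, service class) -- hypothetical data.
    services = [('greetings', 'v1', 'HelloService'),
                ('greetings', 'v1', 'GoodbyeService'),
                ('guestbook', 'v2', 'GuestbookService')]

    api_service_map = collections.OrderedDict()
    for name, version, cls in services:
        api_service_map.setdefault((name, version), []).append(cls)

    for api_info, grouped in api_service_map.iteritems():
        print '%s-%s' % api_info, '->', grouped
    # greetings-v1 -> ['HelloService', 'GoodbyeService']
    # guestbook-v2 -> ['GuestbookService']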
 
 
diff --git a/google/appengine/tools/sdk_update_checker.py b/google/appengine/tools/sdk_update_checker.py
index 87ef113..542455a 100644
--- a/google/appengine/tools/sdk_update_checker.py
+++ b/google/appengine/tools/sdk_update_checker.py
@@ -19,6 +19,7 @@
 import datetime
 import logging
 import os
+import socket
 import sys
 import time
 import urllib2
@@ -256,7 +257,7 @@
             timestamp=version['timestamp'],
             api_versions=version['api_versions'],
             runtime=runtime))
-    except urllib2.URLError, e:
+    except (urllib2.URLError, socket.error), e:
       logging.info('Update check failed: %s', e)
       return
 
diff --git a/google/net/proto2/proto/descriptor_pb2.py b/google/net/proto2/proto/descriptor_pb2.py
index c027e34..fac0cad 100644
--- a/google/net/proto2/proto/descriptor_pb2.py
+++ b/google/net/proto2/proto/descriptor_pb2.py
@@ -28,7 +28,7 @@
 DESCRIPTOR = _descriptor.FileDescriptor(
   name='net/proto2/proto/descriptor.proto',
   package='proto2',
-  serialized_pb='\n!net/proto2/proto/descriptor.proto\x12\x06proto2\">\n\x11\x46ileDescriptorSet\x12)\n\x04\x66ile\x18\x01 \x03(\x0b\x32\x1b.proto2.FileDescriptorProto\"\x95\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12-\n\x0cmessage_type\x18\x04 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x05 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12/\n\x07service\x18\x06 \x03(\x0b\x32\x1e.proto2.ServiceDescriptorProto\x12/\n\textension\x18\x07 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12$\n\x07options\x18\x08 \x01(\x0b\x32\x13.proto2.FileOptions\x12\x30\n\x10source_code_info\x18\t \x01(\x0b\x32\x16.proto2.SourceCodeInfo\"\xa5\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05\x66ield\x18\x02 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12/\n\textension\x18\x06 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12,\n\x0bnested_type\x18\x03 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x04 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12?\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32&.proto2.DescriptorProto.ExtensionRange\x12\x30\n\noneof_decl\x18\x08 \x03(\x0b\x32\x1c.proto2.OneofDescriptorProto\x12\'\n\x07options\x18\x07 \x01(\x0b\x32\x16.proto2.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\x8e\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x31\n\x05label\x18\x04 \x01(\x0e\x32\".proto2.FieldDescriptorProto.Label\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.proto2.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12%\n\x07options\x18\x08 \x01(\x0b\x32\x14.proto2.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"z\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05value\x18\x02 \x03(\x0b\x32 .proto2.EnumValueDescriptorProto\x12$\n\x07options\x18\x03 \x01(\x0b\x32\x13.proto2.EnumOptions\"c\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12)\n\x07options\x18\x03 \x01(\x0b\x32\x18.proto2.EnumValueOptions\"\xad\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x06method\x18\x02 \x03(\x0b\x32\x1d.proto2.MethodDescriptorProto\x12-\n\x06stream\x18\x04 \x03(\x0b\x32\x1d.proto2.StreamDescriptorProto\x12\'\n\x07options\x18\x03 \x01(\x0b\x32\x16.proto2.ServiceOptions\"v\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 
\x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.MethodOptions\"\x87\x01\n\x15StreamDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1b\n\x13\x63lient_message_type\x18\x02 \x01(\t\x12\x1b\n\x13server_message_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.StreamOptions\"\xba\t\n\x0b\x46ileOptions\x12\x19\n\x0e\x63\x63_api_version\x18\x02 \x01(\x05:\x01\x32\x12V\n\x14\x63\x63_api_compatibility\x18\x0f \x01(\x0e\x32&.proto2.FileOptions.CompatibilityLevel:\x10NO_COMPATIBILITY\x12\'\n\x19\x63\x63_proto_array_compatible\x18\x16 \x01(\x08:\x04true\x12\"\n\x14\x63\x63_utf8_verification\x18\x18 \x01(\x08:\x04true\x12$\n\x15\x63\x63_proto1_text_format\x18\x19 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x19\n\x0epy_api_version\x18\x04 \x01(\x05:\x01\x32\x12\x1b\n\x10java_api_version\x18\x05 \x01(\x05:\x01\x32\x12!\n\x13java_use_javaproto2\x18\x06 \x01(\x08:\x04true\x12\x1e\n\x10java_java5_enums\x18\x07 \x01(\x08:\x04true\x12)\n\x1ajava_generate_rpc_baseimpl\x18\r \x01(\x08:\x05\x66\x61lse\x12#\n\x14java_use_javastrings\x18\x15 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_alt_api_package\x18\x13 \x01(\t\x12\x33\n%java_enable_dual_generate_mutable_api\x18\x1a \x01(\x08:\x04true\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12=\n\x0coptimize_for\x18\t \x01(\x0e\x32 .proto2.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\x1a\n\x12javascript_package\x18\x0c \x01(\t\x12\x1a\n\x0fszl_api_version\x18\x0e \x01(\x05:\x01\x31\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"c\n\x12\x43ompatibilityLevel\x12\x14\n\x10NO_COMPATIBILITY\x10\x00\x12\x15\n\x11PROTO1_COMPATIBLE\x10\x64\x12 \n\x1c\x44\x45PRECATED_PROTO1_COMPATIBLE\x10\x32\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xca\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xdb\x04\n\x0c\x46ieldOptions\x12\x31\n\x05\x63type\x18\x01 \x01(\x0e\x32\x1a.proto2.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x31\n\x05jtype\x18\x04 \x01(\x0e\x32\x1a.proto2.FieldOptions.JType:\x06NORMAL\x12\x36\n\x06jstype\x18\x06 \x01(\x0e\x32\x1b.proto2.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14\x65xperimental_map_key\x18\t \x01(\t\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12<\n\x0fupgraded_option\x18\x0b \x03(\x0b\x32#.proto2.FieldOptions.UpgradedOption\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\x1a-\n\x0eUpgradedOption\x12\x0c\n\x04name\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"\x1e\n\x05JType\x12\n\n\x06NORMAL\x10\x00\x12\t\n\x05\x42YTES\x10\x01\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x99\x01\n\x0b\x45numOptions\x12\x13\n\x0bproto1_name\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"t\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xb6\x01\n\x0eServiceOptions\x12\x1d\n\x0emulticast_stub\x18\x14 \x01(\x08:\x05\x66\x61lse\x12#\n\x17\x66\x61ilure_detection_delay\x18\x10 \x01(\x01:\x02-1\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9c\x06\n\rMethodOptions\x12\x35\n\x08protocol\x18\x07 \x01(\x0e\x32\x1e.proto2.MethodOptions.Protocol:\x03TCP\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12$\n\x15\x64uplicate_suppression\x18\t \x01(\x08:\x05\x66\x61lse\x12\x18\n\tfail_fast\x18\n \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0e\x63lient_logging\x18\x0b \x01(\x11:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x0c \x01(\x11:\x03\x32\x35\x36\x12\x41\n\x0esecurity_level\x18\r \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x43\n\x0fresponse_format\x18\x0f \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x42\n\x0erequest_format\x18\x11 \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x13\n\x0bstream_type\x18\x12 \x01(\t\x12\x16\n\x0esecurity_label\x18\x13 \x01(\t\x12\x18\n\x10\x63lient_streaming\x18\x14 \x01(\x08\x12\x18\n\x10server_streaming\x18\x15 \x01(\x08\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\x1c\n\x08Protocol\x12\x07\n\x03TCP\x10\x00\x12\x07\n\x03UDP\x10\x01\"e\n\rSecurityLevel\x12\x08\n\x04NONE\x10\x00\x12\r\n\tINTEGRITY\x10\x01\x12\x19\n\x15PRIVACY_AND_INTEGRITY\x10\x02\x12 \n\x1cSTRONG_PRIVACY_AND_INTEGRITY\x10\x03\"0\n\x06\x46ormat\x12\x10\n\x0cUNCOMPRESSED\x10\x00\x12\x14\n\x10ZIPPY_COMPRESSED\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xde\x03\n\rStreamOptions\x12!\n\x15\x63lient_initial_tokens\x18\x01 \x01(\x03:\x02-1\x12!\n\x15server_initial_tokens\x18\x02 \x01(\x03:\x02-1\x12<\n\ntoken_unit\x18\x03 \x01(\x0e\x32\x1f.proto2.StreamOptions.TokenUnit:\x07MESSAGE\x12\x41\n\x0esecurity_level\x18\x04 \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x16\n\x0esecurity_label\x18\x05 \x01(\t\x12\x1b\n\x0e\x63lient_logging\x18\x06 \x01(\x05:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x07 \x01(\x05:\x03\x32\x35\x36\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12\x18\n\tfail_fast\x18\t \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\"\n\tTokenUnit\x12\x0b\n\x07MESSAGE\x10\x00\x12\x08\n\x04\x42YTE\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x95\x02\n\x13UninterpretedOption\x12\x32\n\x04name\x18\x02 \x03(\x0b\x32$.proto2.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xa8\x01\n\x0eSourceCodeInfo\x12\x31\n\x08location\x18\x01 \x03(\x0b\x32\x1f.proto2.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\tB)\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01')
+  serialized_pb='\n!net/proto2/proto/descriptor.proto\x12\x06proto2\">\n\x11\x46ileDescriptorSet\x12)\n\x04\x66ile\x18\x01 \x03(\x0b\x32\x1b.proto2.FileDescriptorProto\"\x95\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12-\n\x0cmessage_type\x18\x04 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x05 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12/\n\x07service\x18\x06 \x03(\x0b\x32\x1e.proto2.ServiceDescriptorProto\x12/\n\textension\x18\x07 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12$\n\x07options\x18\x08 \x01(\x0b\x32\x13.proto2.FileOptions\x12\x30\n\x10source_code_info\x18\t \x01(\x0b\x32\x16.proto2.SourceCodeInfo\"\xa5\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05\x66ield\x18\x02 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12/\n\textension\x18\x06 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12,\n\x0bnested_type\x18\x03 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x04 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12?\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32&.proto2.DescriptorProto.ExtensionRange\x12\x30\n\noneof_decl\x18\x08 \x03(\x0b\x32\x1c.proto2.OneofDescriptorProto\x12\'\n\x07options\x18\x07 \x01(\x0b\x32\x16.proto2.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\x8e\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x31\n\x05label\x18\x04 \x01(\x0e\x32\".proto2.FieldDescriptorProto.Label\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.proto2.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12%\n\x07options\x18\x08 \x01(\x0b\x32\x14.proto2.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"z\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05value\x18\x02 \x03(\x0b\x32 .proto2.EnumValueDescriptorProto\x12$\n\x07options\x18\x03 \x01(\x0b\x32\x13.proto2.EnumOptions\"c\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12)\n\x07options\x18\x03 \x01(\x0b\x32\x18.proto2.EnumValueOptions\"\xad\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x06method\x18\x02 \x03(\x0b\x32\x1d.proto2.MethodDescriptorProto\x12-\n\x06stream\x18\x04 \x03(\x0b\x32\x1d.proto2.StreamDescriptorProto\x12\'\n\x07options\x18\x03 \x01(\x0b\x32\x16.proto2.ServiceOptions\"v\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 
\x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.MethodOptions\"\x87\x01\n\x15StreamDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1b\n\x13\x63lient_message_type\x18\x02 \x01(\t\x12\x1b\n\x13server_message_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.StreamOptions\"\xba\t\n\x0b\x46ileOptions\x12\x19\n\x0e\x63\x63_api_version\x18\x02 \x01(\x05:\x01\x32\x12V\n\x14\x63\x63_api_compatibility\x18\x0f \x01(\x0e\x32&.proto2.FileOptions.CompatibilityLevel:\x10NO_COMPATIBILITY\x12\'\n\x19\x63\x63_proto_array_compatible\x18\x16 \x01(\x08:\x04true\x12\"\n\x14\x63\x63_utf8_verification\x18\x18 \x01(\x08:\x04true\x12$\n\x15\x63\x63_proto1_text_format\x18\x19 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x19\n\x0epy_api_version\x18\x04 \x01(\x05:\x01\x32\x12\x1b\n\x10java_api_version\x18\x05 \x01(\x05:\x01\x32\x12!\n\x13java_use_javaproto2\x18\x06 \x01(\x08:\x04true\x12\x1e\n\x10java_java5_enums\x18\x07 \x01(\x08:\x04true\x12)\n\x1ajava_generate_rpc_baseimpl\x18\r \x01(\x08:\x05\x66\x61lse\x12#\n\x14java_use_javastrings\x18\x15 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_alt_api_package\x18\x13 \x01(\t\x12\x33\n%java_enable_dual_generate_mutable_api\x18\x1a \x01(\x08:\x04true\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12=\n\x0coptimize_for\x18\t \x01(\x0e\x32 .proto2.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\x1a\n\x12javascript_package\x18\x0c \x01(\t\x12\x1a\n\x0fszl_api_version\x18\x0e \x01(\x05:\x01\x31\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"c\n\x12\x43ompatibilityLevel\x12\x14\n\x10NO_COMPATIBILITY\x10\x00\x12\x15\n\x11PROTO1_COMPATIBLE\x10\x64\x12 \n\x1c\x44\x45PRECATED_PROTO1_COMPATIBLE\x10\x32\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xca\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xec\x04\n\x0c\x46ieldOptions\x12\x31\n\x05\x63type\x18\x01 \x01(\x0e\x32\x1a.proto2.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x31\n\x05jtype\x18\x04 \x01(\x0e\x32\x1a.proto2.FieldOptions.JType:\x06NORMAL\x12\x36\n\x06jstype\x18\x06 \x01(\x0e\x32\x1b.proto2.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14\x65xperimental_map_key\x18\t \x01(\t\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12<\n\x0fupgraded_option\x18\x0b \x03(\x0b\x32#.proto2.FieldOptions.UpgradedOption\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\x1a-\n\x0eUpgradedOption\x12\x0c\n\x04name\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"/\n\x05JType\x12\n\n\x06NORMAL\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\x0f\n\x0b\x42YTE_BUFFER\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x99\x01\n\x0b\x45numOptions\x12\x13\n\x0bproto1_name\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"t\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xb6\x01\n\x0eServiceOptions\x12\x1d\n\x0emulticast_stub\x18\x14 \x01(\x08:\x05\x66\x61lse\x12#\n\x17\x66\x61ilure_detection_delay\x18\x10 \x01(\x01:\x02-1\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9c\x06\n\rMethodOptions\x12\x35\n\x08protocol\x18\x07 \x01(\x0e\x32\x1e.proto2.MethodOptions.Protocol:\x03TCP\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12$\n\x15\x64uplicate_suppression\x18\t \x01(\x08:\x05\x66\x61lse\x12\x18\n\tfail_fast\x18\n \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0e\x63lient_logging\x18\x0b \x01(\x11:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x0c \x01(\x11:\x03\x32\x35\x36\x12\x41\n\x0esecurity_level\x18\r \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x43\n\x0fresponse_format\x18\x0f \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x42\n\x0erequest_format\x18\x11 \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x13\n\x0bstream_type\x18\x12 \x01(\t\x12\x16\n\x0esecurity_label\x18\x13 \x01(\t\x12\x18\n\x10\x63lient_streaming\x18\x14 \x01(\x08\x12\x18\n\x10server_streaming\x18\x15 \x01(\x08\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\x1c\n\x08Protocol\x12\x07\n\x03TCP\x10\x00\x12\x07\n\x03UDP\x10\x01\"e\n\rSecurityLevel\x12\x08\n\x04NONE\x10\x00\x12\r\n\tINTEGRITY\x10\x01\x12\x19\n\x15PRIVACY_AND_INTEGRITY\x10\x02\x12 \n\x1cSTRONG_PRIVACY_AND_INTEGRITY\x10\x03\"0\n\x06\x46ormat\x12\x10\n\x0cUNCOMPRESSED\x10\x00\x12\x14\n\x10ZIPPY_COMPRESSED\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xde\x03\n\rStreamOptions\x12!\n\x15\x63lient_initial_tokens\x18\x01 \x01(\x03:\x02-1\x12!\n\x15server_initial_tokens\x18\x02 \x01(\x03:\x02-1\x12<\n\ntoken_unit\x18\x03 \x01(\x0e\x32\x1f.proto2.StreamOptions.TokenUnit:\x07MESSAGE\x12\x41\n\x0esecurity_level\x18\x04 \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x16\n\x0esecurity_label\x18\x05 \x01(\t\x12\x1b\n\x0e\x63lient_logging\x18\x06 \x01(\x05:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x07 \x01(\x05:\x03\x32\x35\x36\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12\x18\n\tfail_fast\x18\t \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\"\n\tTokenUnit\x12\x0b\n\x07MESSAGE\x10\x00\x12\x08\n\x04\x42YTE\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x95\x02\n\x13UninterpretedOption\x12\x32\n\x04name\x18\x02 \x03(\x0b\x32$.proto2.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xa8\x01\n\x0eSourceCodeInfo\x12\x31\n\x08location\x18\x01 \x03(\x0b\x32\x1f.proto2.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\tB)\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01')
 
 
 
@@ -231,11 +231,15 @@
       name='BYTES', index=1, number=1,
       options=None,
       type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BYTE_BUFFER', index=2, number=2,
+      options=None,
+      type=None),
   ],
   containing_type=None,
   options=None,
   serialized_start=4221,
-  serialized_end=4251,
+  serialized_end=4268,
 )
 
 _FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor(
@@ -259,8 +263,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=4253,
-  serialized_end=4306,
+  serialized_start=4270,
+  serialized_end=4323,
 )
 
 _METHODOPTIONS_PROTOCOL = _descriptor.EnumDescriptor(
@@ -280,8 +284,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=5383,
-  serialized_end=5411,
+  serialized_start=5400,
+  serialized_end=5428,
 )
 
 _METHODOPTIONS_SECURITYLEVEL = _descriptor.EnumDescriptor(
@@ -309,8 +313,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=5413,
-  serialized_end=5514,
+  serialized_start=5430,
+  serialized_end=5531,
 )
 
 _METHODOPTIONS_FORMAT = _descriptor.EnumDescriptor(
@@ -330,8 +334,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=5516,
-  serialized_end=5564,
+  serialized_start=5533,
+  serialized_end=5581,
 )
 
 _STREAMOPTIONS_TOKENUNIT = _descriptor.EnumDescriptor(
@@ -351,8 +355,8 @@
   ],
   containing_type=None,
   options=None,
-  serialized_start=6011,
-  serialized_end=6045,
+  serialized_start=6028,
+  serialized_end=6062,
 )
 
 
@@ -1323,7 +1327,7 @@
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
   serialized_start=3714,
-  serialized_end=4317,
+  serialized_end=4334,
 )
 
 
@@ -1371,8 +1375,8 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=4320,
-  serialized_end=4473,
+  serialized_start=4337,
+  serialized_end=4490,
 )
 
 
@@ -1406,8 +1410,8 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=4475,
-  serialized_end=4591,
+  serialized_start=4492,
+  serialized_end=4608,
 )
 
 
@@ -1455,8 +1459,8 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=4594,
-  serialized_end=4776,
+  serialized_start=4611,
+  serialized_end=4793,
 )
 
 
@@ -1584,8 +1588,8 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=4779,
-  serialized_end=5575,
+  serialized_start=4796,
+  serialized_end=5592,
 )
 
 
@@ -1683,8 +1687,8 @@
   options=None,
   is_extendable=True,
   extension_ranges=[(1000, 536870912), ],
-  serialized_start=5578,
-  serialized_end=6056,
+  serialized_start=5595,
+  serialized_end=6073,
 )
 
 
@@ -1718,8 +1722,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=6285,
-  serialized_end=6336,
+  serialized_start=6302,
+  serialized_end=6353,
 )
 
 _UNINTERPRETEDOPTION = _descriptor.Descriptor(
@@ -1787,8 +1791,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=6059,
-  serialized_end=6336,
+  serialized_start=6076,
+  serialized_end=6353,
 )
 
 
@@ -1836,8 +1840,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=6408,
-  serialized_end=6507,
+  serialized_start=6425,
+  serialized_end=6524,
 )
 
 _SOURCECODEINFO = _descriptor.Descriptor(
@@ -1863,8 +1867,8 @@
   options=None,
   is_extendable=False,
   extension_ranges=[],
-  serialized_start=6339,
-  serialized_end=6507,
+  serialized_start=6356,
+  serialized_end=6524,
 )
 
 _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
diff --git a/google/net/proto2/python/public/text_format.py b/google/net/proto2/python/public/text_format.py
index 75df781..60e15cc 100644
--- a/google/net/proto2/python/public/text_format.py
+++ b/google/net/proto2/python/public/text_format.py
@@ -177,10 +177,13 @@
     text: Message ASCII representation.
     message: A protocol buffer message to merge into.
 
+  Returns:
+    The same message that was passed in as an argument.
+
   Raises:
     ParseError: On ASCII parsing problems.
   """
-  ParseLines(text.split('\n'), message)
+  return ParseLines(text.split('\n'), message)
 
 
 def Merge(text, message):
@@ -193,10 +196,13 @@
     text: Message ASCII representation.
     message: A protocol buffer message to merge into.
 
+  Returns:
+    The same message that was passed in as an argument.
+
   Raises:
     ParseError: On ASCII parsing problems.
   """
-  MergeLines(text.split('\n'), message)
+  return MergeLines(text.split('\n'), message)
 
 
 def ParseLines(lines, message):
@@ -206,10 +212,14 @@
     lines: An iterable of lines of a message's ASCII representation.
     message: A protocol buffer message to merge into.
 
+  Returns:
+    The same message that was passed in as an argument.
+
   Raises:
     ParseError: On ASCII parsing problems.
   """
   _ParseOrMerge(lines, message, False)
+  return message
 
 
 def MergeLines(lines, message):
@@ -219,10 +229,14 @@
     lines: An iterable of lines of a message's ASCII representation.
     message: A protocol buffer message to merge into.
 
+  Returns:
+    The same message that was passed in as an argument.
+
   Raises:
     ParseError: On ASCII parsing problems.
   """
   _ParseOrMerge(lines, message, True)
+  return message
 
 
 def _MergeField(tokenizer, message, allow_multiple_scalars):
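
Returning the message from Parse/Merge and the *Lines variants allows parsing and construction in a single expression. A usage sketch, where MyMessage is a placeholder for any generated proto2 message class:

    from google.net.proto2.python.public import text_format

    msg = text_format.Merge('name: "example"', MyMessage())
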
diff --git a/httplib2/__init__.py b/httplib2/__init__.py
new file mode 100644
index 0000000..191ef1e
--- /dev/null
+++ b/httplib2/__init__.py
@@ -0,0 +1,1673 @@
+from __future__ import generators
+"""
+httplib2
+
+A caching http interface that supports ETags and gzip
+to conserve bandwidth.
+
+Requires Python 2.3 or later
+
+Changelog:
+2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
+
+"""
+
+__author__ = "Joe Gregorio (joe@bitworking.org)"
+__copyright__ = "Copyright 2006, Joe Gregorio"
+__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
+                    "James Antill",
+                    "Xavier Verges Farrero",
+                    "Jonathan Feinberg",
+                    "Blair Zajac",
+                    "Sam Ruby",
+                    "Louis Nyffenegger"]
+__license__ = "MIT"
+__version__ = "0.8"
+
+import re
+import sys
+import email
+import email.Utils
+import email.Message
+import email.FeedParser
+import StringIO
+import gzip
+import zlib
+import httplib
+import urlparse
+import urllib
+import base64
+import os
+import copy
+import calendar
+import time
+import random
+import errno
+try:
+    from hashlib import sha1 as _sha, md5 as _md5
+except ImportError:
+    # prior to Python 2.5, these were separate modules
+    import sha
+    import md5
+    _sha = sha.new
+    _md5 = md5.new
+import hmac
+from gettext import gettext as _
+import socket
+
+try:
+    from httplib2 import socks
+except ImportError:
+    try:
+        import socks
+    except (ImportError, AttributeError):
+        socks = None
+
+# Build the appropriate socket wrapper for ssl
+try:
+    import ssl # python 2.6
+    ssl_SSLError = ssl.SSLError
+    def _ssl_wrap_socket(sock, key_file, cert_file,
+                         disable_validation, ca_certs):
+        if disable_validation:
+            cert_reqs = ssl.CERT_NONE
+        else:
+            cert_reqs = ssl.CERT_REQUIRED
+        # We should be specifying SSL version 3 or TLS v1, but the ssl module
+        # doesn't expose the necessary knobs. So we need to go with the default
+        # of SSLv23.
+        return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
+                               cert_reqs=cert_reqs, ca_certs=ca_certs)
+except (AttributeError, ImportError):
+    ssl_SSLError = None
+    def _ssl_wrap_socket(sock, key_file, cert_file,
+                         disable_validation, ca_certs):
+        if not disable_validation:
+            raise CertificateValidationUnsupported(
+                    "SSL certificate validation is not supported without "
+                    "the ssl module installed. To avoid this error, install "
+                    "the ssl module, or explicity disable validation.")
+        ssl_sock = socket.ssl(sock, key_file, cert_file)
+        return httplib.FakeSocket(sock, ssl_sock)
+
+
+if sys.version_info >= (2,3):
+    from iri2uri import iri2uri
+else:
+    def iri2uri(uri):
+        return uri
+
+def has_timeout(timeout): # python 2.6
+    if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
+        return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
+    return (timeout is not None)
+
+__all__ = [
+    'Http', 'Response', 'ProxyInfo', 'HttpLib2Error', 'RedirectMissingLocation',
+    'RedirectLimit', 'FailedToDecompressContent',
+    'UnimplementedDigestAuthOptionError',
+    'UnimplementedHmacDigestAuthOptionError',
+    'debuglevel', 'ProxiesUnavailableError']
+
+
+# The httplib debug level, set to a non-zero value to get debug output
+debuglevel = 0
+
+# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
+RETRIES = 2
+
+# Python 2.3 support
+if sys.version_info < (2,4):
+    def sorted(seq):
+        seq.sort()
+        return seq
+
+# Python 2.3 support
+def HTTPResponse__getheaders(self):
+    """Return list of (header, value) tuples."""
+    if self.msg is None:
+        raise httplib.ResponseNotReady()
+    return self.msg.items()
+
+if not hasattr(httplib.HTTPResponse, 'getheaders'):
+    httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
+
+# All exceptions raised here derive from HttpLib2Error
+class HttpLib2Error(Exception): pass
+
+# Some exceptions can be caught and optionally
+# be turned back into responses.
+class HttpLib2ErrorWithResponse(HttpLib2Error):
+    def __init__(self, desc, response, content):
+        self.response = response
+        self.content = content
+        HttpLib2Error.__init__(self, desc)
+
+class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
+class RedirectLimit(HttpLib2ErrorWithResponse): pass
+class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
+class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
+class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
+
+class MalformedHeader(HttpLib2Error): pass
+class RelativeURIError(HttpLib2Error): pass
+class ServerNotFoundError(HttpLib2Error): pass
+class ProxiesUnavailableError(HttpLib2Error): pass
+class CertificateValidationUnsupported(HttpLib2Error): pass
+class SSLHandshakeError(HttpLib2Error): pass
+class NotSupportedOnThisPlatform(HttpLib2Error): pass
+class CertificateHostnameMismatch(SSLHandshakeError):
+    def __init__(self, desc, host, cert):
+        HttpLib2Error.__init__(self, desc)
+        self.host = host
+        self.cert = cert
+
+# Open Items:
+# -----------
+# Proxy support
+
+# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
+
+# Pluggable cache storage (supports storing the cache in
+#   flat files by default. We need a plug-in architecture
+#   that can support Berkeley DB and Squid)
+
+# == Known Issues ==
+# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
+# Does not handle Cache-Control: max-stale
+# Does not use Age: headers when calculating cache freshness.
+
+
+# The number of redirections to follow before giving up.
+# Note that only GET redirects are automatically followed.
+# Will also honor 301 requests by saving that info and never
+# requesting that URI again.
+DEFAULT_MAX_REDIRECTS = 5
+
+try:
+    # Users can optionally provide a module that tells us where the CA_CERTS
+    # are located.
+    import ca_certs_locater
+    CA_CERTS = ca_certs_locater.get()
+except ImportError:
+    # Default CA certificates file bundled with httplib2.
+    CA_CERTS = os.path.join(
+        os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
+
+# Which headers are hop-by-hop headers by default
+HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
+
+def _get_end2end_headers(response):
+    hopbyhop = list(HOP_BY_HOP)
+    hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
+    return [header for header in response.keys() if header not in hopbyhop]
+
+URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
+
+def parse_uri(uri):
+    """Parses a URI using the regex given in Appendix B of RFC 3986.
+
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+    """
+    groups = URI.match(uri).groups()
+    return (groups[1], groups[3], groups[4], groups[6], groups[8])
+
+def urlnorm(uri):
+    (scheme, authority, path, query, fragment) = parse_uri(uri)
+    if not scheme or not authority:
+        raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
+    authority = authority.lower()
+    scheme = scheme.lower()
+    if not path:
+        path = "/"
+    # Could do syntax based normalization of the URI before
+    # computing the digest. See Section 6.2.2 of Std 66.
+    request_uri = query and "?".join([path, query]) or path
+    scheme = scheme.lower()
+    defrag_uri = scheme + "://" + authority + request_uri
+    return scheme, authority, request_uri, defrag_uri
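
A quick illustration of urlnorm (assuming the functions above are in scope): scheme and authority are lowercased, an empty path becomes '/', and the fragment is dropped from the defragmented URI that httplib2 uses as a cache key.

    print urlnorm('HTTP://Example.COM?q=1#frag')
    # -> ('http', 'example.com', '/?q=1', 'http://example.com/?q=1')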
+
+
+# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
+re_url_scheme    = re.compile(r'^\w+://')
+re_slash         = re.compile(r'[?/:|]+')
+
+def safename(filename):
+    """Return a filename suitable for the cache.
+
+    Strips dangerous and common characters to create a filename we
+    can use to store the cache in.
+    """
+
+    try:
+        if re_url_scheme.match(filename):
+            if isinstance(filename,str):
+                filename = filename.decode('utf-8')
+                filename = filename.encode('idna')
+            else:
+                filename = filename.encode('idna')
+    except UnicodeError:
+        pass
+    if isinstance(filename,unicode):
+        filename=filename.encode('utf-8')
+    filemd5 = _md5(filename).hexdigest()
+    filename = re_url_scheme.sub("", filename)
+    filename = re_slash.sub(",", filename)
+
+    # limit length of filename
+    if len(filename)>200:
+        filename=filename[:200]
+    return ",".join((filename, filemd5))
+
+NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
+def _normalize_headers(headers):
+    # Lowercase header names and collapse folded continuation whitespace
+    # (RFC 2616 LWS) in values down to a single space.
+    return dict([(key.lower(), NORMALIZE_SPACE.sub(' ', value).strip()) for (key, value) in headers.iteritems()])
+
+def _parse_cache_control(headers):
+    retval = {}
+    if headers.has_key('cache-control'):
+        parts =  headers['cache-control'].split(',')
+        parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
+        parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
+        retval = dict(parts_with_args + parts_wo_args)
+    return retval
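
_parse_cache_control splits the header on commas, lowercases each directive, and maps valueless directives to 1. For example:

    print _parse_cache_control({'cache-control': 'max-age=3600, no-cache'})
    # -> {'max-age': '3600', 'no-cache': 1}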
+
+# Whether to use a strict mode to parse WWW-Authenticate headers
+# Might lead to bad results in case of ill-formed header value,
+# so disabled by default, falling back to relaxed parsing.
+# Set to true to turn on, useful for testing servers.
+USE_WWW_AUTH_STRICT_PARSING = 0
+
+# In regex below:
+#    [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+             matches a "token" as defined by HTTP
+#    "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?"    matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
+# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
+#    \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
+WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
+WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
+UNQUOTE_PAIRS = re.compile(r'\\(.)')
+def _parse_www_authenticate(headers, headername='www-authenticate'):
+    """Returns a dictionary of dictionaries, one dict
+    per auth_scheme."""
+    retval = {}
+    if headers.has_key(headername):
+        try:
+
+            authenticate = headers[headername].strip()
+            www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
+            while authenticate:
+                # Break off the scheme at the beginning of the line
+                if headername == 'authentication-info':
+                    (auth_scheme, the_rest) = ('digest', authenticate)
+                else:
+                    (auth_scheme, the_rest) = authenticate.split(" ", 1)
+                # Now loop over all the key value pairs that come after the scheme,
+                # being careful not to roll into the next scheme
+                match = www_auth.search(the_rest)
+                auth_params = {}
+                while match:
+                    if match and len(match.groups()) == 3:
+                        (key, value, the_rest) = match.groups()
+                        auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
+                    match = www_auth.search(the_rest)
+                retval[auth_scheme.lower()] = auth_params
+                authenticate = the_rest.strip()
+
+        except ValueError:
+            raise MalformedHeader("WWW-Authenticate")
+    return retval
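
The parser returns one dict of auth-params per scheme, keyed by the lowercased scheme name. A small example (assuming the function above is in scope):

    hdrs = {'www-authenticate': 'Basic realm="example", charset="UTF-8"'}
    print _parse_www_authenticate(hdrs)
    # -> {'basic': {'realm': 'example', 'charset': 'UTF-8'}}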
+
+
+def _entry_disposition(response_headers, request_headers):
+    """Determine freshness from the Date, Expires and Cache-Control headers.
+
+    We don't handle the following:
+
+    1. Cache-Control: max-stale
+    2. Age: headers are not used in the calculations.
+
+    Note that this algorithm is simpler than you might think
+    because we are operating as a private (non-shared) cache.
+    This lets us ignore 's-maxage'. We can also ignore
+    'proxy-invalidate' since we aren't a proxy.
+    As a design decision we will never return a stale document
+    as fresh, hence the non-implementation of 'max-stale'. This
+    also lets us safely ignore 'must-revalidate', since we operate
+    as if every server had sent 'must-revalidate'.
+    Since we are private we get to ignore both 'public' and
+    'private' parameters. We also ignore 'no-transform' since
+    we don't do any transformations.
+    The 'no-store' parameter is handled at a higher level.
+    So the only Cache-Control parameters we look at are:
+
+    no-cache
+    only-if-cached
+    max-age
+    min-fresh
+    """
+
+    retval = "STALE"
+    cc = _parse_cache_control(request_headers)
+    cc_response = _parse_cache_control(response_headers)
+
+    if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
+        retval = "TRANSPARENT"
+        if 'cache-control' not in request_headers:
+            request_headers['cache-control'] = 'no-cache'
+    elif cc.has_key('no-cache'):
+        retval = "TRANSPARENT"
+    elif cc_response.has_key('no-cache'):
+        retval = "STALE"
+    elif cc.has_key('only-if-cached'):
+        retval = "FRESH"
+    elif response_headers.has_key('date'):
+        date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
+        now = time.time()
+        current_age = max(0, now - date)
+        if cc_response.has_key('max-age'):
+            try:
+                freshness_lifetime = int(cc_response['max-age'])
+            except ValueError:
+                freshness_lifetime = 0
+        elif response_headers.has_key('expires'):
+            expires = email.Utils.parsedate_tz(response_headers['expires'])
+            if None == expires:
+                freshness_lifetime = 0
+            else:
+                freshness_lifetime = max(0, calendar.timegm(expires) - date)
+        else:
+            freshness_lifetime = 0
+        if cc.has_key('max-age'):
+            try:
+                freshness_lifetime = int(cc['max-age'])
+            except ValueError:
+                freshness_lifetime = 0
+        if cc.has_key('min-fresh'):
+            try:
+                min_fresh = int(cc['min-fresh'])
+            except ValueError:
+                min_fresh = 0
+            current_age += min_fresh
+        if freshness_lifetime > current_age:
+            retval = "FRESH"
+    return retval
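
Two ends of the spectrum as a sketch (helpers above in scope): a request carrying no-cache bypasses the cache entirely, and a response that is younger than its max-age is FRESH.

    import time
    import email.Utils

    print _entry_disposition({}, {'cache-control': 'no-cache'})  # TRANSPARENT

    resp = {'date': email.Utils.formatdate(time.time()),
            'cache-control': 'max-age=3600'}
    print _entry_disposition(resp, {})  # FRESH (current age ~0 < 3600)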
+
+def _decompressContent(response, new_content):
+    content = new_content
+    try:
+        encoding = response.get('content-encoding', None)
+        if encoding in ['gzip', 'deflate']:
+            if encoding == 'gzip':
+                content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
+            if encoding == 'deflate':
+                content = zlib.decompress(content)
+            response['content-length'] = str(len(content))
+            # Record the historical presence of the encoding in a way that won't interfere.
+            response['-content-encoding'] = response['content-encoding']
+            del response['content-encoding']
+    except IOError:
+        content = ""
+        raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
+    return content
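
A round-trip sketch: gzip a payload, then let _decompressContent inflate it and rewrite the bookkeeping headers (content-length updated, content-encoding renamed to -content-encoding).

    import gzip
    import StringIO

    buf = StringIO.StringIO()
    gz = gzip.GzipFile(fileobj=buf, mode='wb')
    gz.write('hello world')
    gz.close()

    response = {'content-encoding': 'gzip'}
    print _decompressContent(response, buf.getvalue())  # 'hello world'
    print response  # {'content-length': '11', '-content-encoding': 'gzip'}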
+
+def _updateCache(request_headers, response_headers, content, cache, cachekey):
+    if cachekey:
+        cc = _parse_cache_control(request_headers)
+        cc_response = _parse_cache_control(response_headers)
+        if cc.has_key('no-store') or cc_response.has_key('no-store'):
+            cache.delete(cachekey)
+        else:
+            info = email.Message.Message()
+            for key, value in response_headers.iteritems():
+                if key not in ['status','content-encoding','transfer-encoding']:
+                    info[key] = value
+
+            # Add annotations to the cache to indicate what headers
+            # are variant for this request.
+            vary = response_headers.get('vary', None)
+            if vary:
+                vary_headers = vary.lower().replace(' ', '').split(',')
+                for header in vary_headers:
+                    key = '-varied-%s' % header
+                    try:
+                        info[key] = request_headers[header]
+                    except KeyError:
+                        pass
+
+            status = response_headers.status
+            if status == 304:
+                status = 200
+
+            status_header = 'status: %d\r\n' % status
+
+            header_str = info.as_string()
+
+            header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
+            text = "".join([status_header, header_str, content])
+
+            cache.set(cachekey, text)
+
+def _cnonce():
+    dig = _md5("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest()
+    return dig[:16]
+
+def _wsse_username_token(cnonce, iso_now, password):
+    return base64.b64encode(_sha("%s%s%s" % (cnonce, iso_now, password)).digest()).strip()
+
+
+# For credentials we need two things: first,
+# a pool of credentials to try (not necessarily tied to Basic, Digest, etc.).
+# Then we also need a list of URIs that have already demanded authentication.
+# That list is tricky since sub-URIs can take the same auth, or the
+# auth scheme may change as you descend the tree.
+# So we also need each Auth instance to be able to tell us
+# how close to the 'top' it is.
+
+class Authentication(object):
+    def __init__(self, credentials, host, request_uri, headers, response, content, http):
+        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
+        self.path = path
+        self.host = host
+        self.credentials = credentials
+        self.http = http
+
+    def depth(self, request_uri):
+        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
+        return request_uri[len(self.path):].count("/")
+
+    def inscope(self, host, request_uri):
+        # XXX Should we normalize the request_uri?
+        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
+        return (host == self.host) and path.startswith(self.path)
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header. Override this in subclasses."""
+        pass
+
+    def response(self, response, content):
+        """Gives us a chance to update with new nonces
+        or such returned from the last authorized response.
+        Over-rise this in sub-classes if necessary.
+
+        Return TRUE is the request is to be retried, for
+        example Digest may return stale=true.
+        """
+        return False
+
+
+
+class BasicAuthentication(Authentication):
+    def __init__(self, credentials, host, request_uri, headers, response, content, http):
+        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header."""
+        headers['authorization'] = 'Basic ' + base64.b64encode("%s:%s" % self.credentials).strip()
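
The resulting header is just the base64 of 'user:password'. For example, with hypothetical credentials ('user', 'pass'):

    import base64
    print 'Basic ' + base64.b64encode('%s:%s' % ('user', 'pass')).strip()
    # -> Basic dXNlcjpwYXNz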
+
+
+class DigestAuthentication(Authentication):
+    """Only do qop='auth' and MD5, since that
+    is all Apache currently implements"""
+    def __init__(self, credentials, host, request_uri, headers, response, content, http):
+        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
+        challenge = _parse_www_authenticate(response, 'www-authenticate')
+        self.challenge = challenge['digest']
+        qop = self.challenge.get('qop', 'auth')
+        self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
+        if self.challenge['qop'] is None:
+            raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
+        self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
+        if self.challenge['algorithm'] != 'MD5':
+            raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
+        self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
+        self.challenge['nc'] = 1
+
+    def request(self, method, request_uri, headers, content, cnonce = None):
+        """Modify the request headers"""
+        H = lambda x: _md5(x).hexdigest()
+        KD = lambda s, d: H("%s:%s" % (s, d))
+        A2 = "".join([method, ":", request_uri])
+        self.challenge['cnonce'] = cnonce or _cnonce()
+        request_digest  = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (
+                self.challenge['nonce'],
+                '%08x' % self.challenge['nc'],
+                self.challenge['cnonce'],
+                self.challenge['qop'], H(A2)))
+        headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
+                self.credentials[0],
+                self.challenge['realm'],
+                self.challenge['nonce'],
+                request_uri,
+                self.challenge['algorithm'],
+                request_digest,
+                self.challenge['qop'],
+                self.challenge['nc'],
+                self.challenge['cnonce'])
+        if self.challenge.get('opaque'):
+            headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
+        self.challenge['nc'] += 1
+
+    def response(self, response, content):
+        if not response.has_key('authentication-info'):
+            challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
+            if 'true' == challenge.get('stale'):
+                self.challenge['nonce'] = challenge['nonce']
+                self.challenge['nc'] = 1
+                return True
+        else:
+            updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
+
+            if updated_challenge.has_key('nextnonce'):
+                self.challenge['nonce'] = updated_challenge['nextnonce']
+                self.challenge['nc'] = 1
+        return False
+
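+# For reference, the request digest computed above follows RFC 2617 with
+# qop="auth" and the MD5 algorithm:
+#
+#   A1       = username ":" realm ":" password
+#   A2       = method ":" request-uri
+#   response = MD5( MD5(A1) ":" nonce ":" nc ":" cnonce ":" qop ":" MD5(A2) )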
+
+class HmacDigestAuthentication(Authentication):
+    """Adapted from Robert Sayre's code and DigestAuthentication above."""
+    __author__ = "Thomas Broyer (t.broyer@ltgt.net)"
+
+    def __init__(self, credentials, host, request_uri, headers, response, content, http):
+        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
+        challenge = _parse_www_authenticate(response, 'www-authenticate')
+        self.challenge = challenge['hmacdigest']
+        # TODO: self.challenge['domain']
+        self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
+        if self.challenge['reason'] not in ['unauthorized', 'integrity']:
+            self.challenge['reason'] = 'unauthorized'
+        self.challenge['salt'] = self.challenge.get('salt', '')
+        if not self.challenge.get('snonce'):
+            raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
+        self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
+        if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
+            raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
+        self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
+        if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
+            raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
+        if self.challenge['algorithm'] == 'HMAC-MD5':
+            self.hashmod = _md5
+        else:
+            self.hashmod = _sha
+        if self.challenge['pw-algorithm'] == 'MD5':
+            self.pwhashmod = _md5
+        else:
+            self.pwhashmod = _sha
+        self.key = "".join([self.credentials[0], ":",
+                            self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
+                            ":", self.challenge['realm']])
+        self.key = self.pwhashmod.new(self.key).hexdigest().lower()
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers"""
+        keys = _get_end2end_headers(headers)
+        keylist = "".join(["%s " % k for k in keys])
+        headers_val = "".join([headers[k] for k in keys])
+        created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
+        cnonce = _cnonce()
+        request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
+        request_digest  = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
+        headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
+                self.credentials[0],
+                self.challenge['realm'],
+                self.challenge['snonce'],
+                cnonce,
+                request_uri,
+                created,
+                request_digest,
+                keylist)
+
+    def response(self, response, content):
+        challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
+        if challenge.get('reason') in ['integrity', 'stale']:
+            return True
+        return False
+
+
+class WsseAuthentication(Authentication):
+    """This is thinly tested and should not be relied upon.
+    At this time there isn't any third party server to test against.
+    Blogger and TypePad implemented this algorithm at one point
+    but Blogger has since switched to Basic over HTTPS and
+    TypePad has implemented it wrong, by never issuing a 401
+    challenge but instead requiring your client to telepathically know that
+    their endpoint is expecting WSSE profile="UsernameToken"."""
+    def __init__(self, credentials, host, request_uri, headers, response, content, http):
+        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header."""
+        headers['authorization'] = 'WSSE profile="UsernameToken"'
+        iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
+        cnonce = _cnonce()
+        password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
+        headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
+                self.credentials[0],
+                password_digest,
+                cnonce,
+                iso_now)
+
+class GoogleLoginAuthentication(Authentication):
+    def __init__(self, credentials, host, request_uri, headers, response, content, http):
+        from urllib import urlencode
+        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
+        challenge = _parse_www_authenticate(response, 'www-authenticate')
+        service = challenge['googlelogin'].get('service', 'xapi')
+        # Blogger actually returns the service in the challenge
+        # For the rest we guess based on the URI
+        if service == 'xapi' and request_uri.find("calendar") > 0:
+            service = "cl"
+        # No point in guessing Base or Spreadsheet
+        #elif request_uri.find("spreadsheets") > 0:
+        #    service = "wise"
+
+        auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
+        resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
+        lines = content.split('\n')
+        d = dict([tuple(line.split("=", 1)) for line in lines if line])
+        if resp.status == 403:
+            self.Auth = ""
+        else:
+            self.Auth = d['Auth']
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header."""
+        headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
+
+
+AUTH_SCHEME_CLASSES = {
+    "basic": BasicAuthentication,
+    "wsse": WsseAuthentication,
+    "digest": DigestAuthentication,
+    "hmacdigest": HmacDigestAuthentication,
+    "googlelogin": GoogleLoginAuthentication
+}
+
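+# When a 401 response carries several challenges, _auth_from_challenge (below)
+# tries the supported schemes in this order, strongest first.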
+AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
+
+class FileCache(object):
+    """Uses a local directory as a store for cached files.
+    Not really safe to use if multiple threads or processes are going to
+    be running on the same cache.
+    """
+    def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
+        self.cache = cache
+        self.safe = safe
+        if not os.path.exists(self.cache):
+            os.makedirs(self.cache)
+
+    def get(self, key):
+        retval = None
+        cacheFullPath = os.path.join(self.cache, self.safe(key))
+        try:
+            f = file(cacheFullPath, "rb")
+            retval = f.read()
+            f.close()
+        except IOError:
+            pass
+        return retval
+
+    def set(self, key, value):
+        cacheFullPath = os.path.join(self.cache, self.safe(key))
+        f = file(cacheFullPath, "wb")
+        f.write(value)
+        f.close()
+
+    def delete(self, key):
+        cacheFullPath = os.path.join(self.cache, self.safe(key))
+        if os.path.exists(cacheFullPath):
+            os.remove(cacheFullPath)
+
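+# A minimal sketch of using FileCache on its own (the directory name and key
+# below are arbitrary examples):
+#
+#   cache = FileCache(".cache")
+#   cache.set("http://example.org/", "cached response bytes")
+#   assert cache.get("http://example.org/") == "cached response bytes"
+#   cache.delete("http://example.org/")
+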
+class Credentials(object):
+    def __init__(self):
+        self.credentials = []
+
+    def add(self, name, password, domain=""):
+        self.credentials.append((domain.lower(), name, password))
+
+    def clear(self):
+        self.credentials = []
+
+    def iter(self, domain):
+        for (cdomain, name, password) in self.credentials:
+            if cdomain == "" or domain == cdomain:
+                yield (name, password)
+
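+# Note: credentials added with an empty domain apply to every host, so after
+# add('joe', 's3cret') the pair is yielded by iter() for any domain argument.
+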
+class KeyCerts(Credentials):
+    """Identical to Credentials except that
+    name/password are mapped to key/cert."""
+    pass
+
+class AllHosts(object):
+    pass
+
+class ProxyInfo(object):
+    """Collect information required to use a proxy."""
+    bypass_hosts = ()
+
+    def __init__(self, proxy_type, proxy_host, proxy_port,
+                 proxy_rdns=None, proxy_user=None, proxy_pass=None):
+        """The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX
+        constants. For example:
+
+        p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
+            proxy_host='localhost', proxy_port=8000)
+        """
+        self.proxy_type = proxy_type
+        self.proxy_host = proxy_host
+        self.proxy_port = proxy_port
+        self.proxy_rdns = proxy_rdns
+        self.proxy_user = proxy_user
+        self.proxy_pass = proxy_pass
+
+    def astuple(self):
+        return (self.proxy_type, self.proxy_host, self.proxy_port,
+                self.proxy_rdns, self.proxy_user, self.proxy_pass)
+
+    def isgood(self):
+        return (self.proxy_host != None) and (self.proxy_port != None)
+
+    def applies_to(self, hostname):
+        return not self.bypass_host(hostname)
+
+    def bypass_host(self, hostname):
+        """Has this host been excluded from the proxy config"""
+        if self.bypass_hosts is AllHosts:
+            return True
+
+        bypass = False
+        for domain in self.bypass_hosts:
+            if hostname.endswith(domain):
+                bypass = True
+
+        return bypass
+
+
+def proxy_info_from_environment(method='http'):
+    """
+    Read proxy info from the environment variables.
+    """
+    if method not in ['http', 'https']:
+        return
+
+    env_var = method + '_proxy'
+    url = os.environ.get(env_var, os.environ.get(env_var.upper()))
+    if not url:
+        return
+    pi = proxy_info_from_url(url, method)
+
+    no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
+    bypass_hosts = []
+    if no_proxy:
+        bypass_hosts = no_proxy.split(',')
+    # special case, no_proxy=* means all hosts bypassed
+    if no_proxy == '*':
+        bypass_hosts = AllHosts
+
+    pi.bypass_hosts = bypass_hosts
+    return pi
+
+def proxy_info_from_url(url, method='http'):
+    """
+    Construct a ProxyInfo from a URL (such as http_proxy env var)
+    """
+    url = urlparse.urlparse(url)
+    username = None
+    password = None
+    port = None
+    if '@' in url[1]:
+        ident, host_port = url[1].split('@', 1)
+        if ':' in ident:
+            username, password = ident.split(':', 1)
+        else:
+            password = ident
+    else:
+        host_port = url[1]
+    if ':' in host_port:
+        host, port = host_port.split(':', 1)
+    else:
+        host = host_port
+
+    if port:
+        port = int(port)
+    else:
+        port = dict(https=443, http=80)[method]
+
+    proxy_type = 3 # socks.PROXY_TYPE_HTTP
+    return ProxyInfo(
+        proxy_type = proxy_type,
+        proxy_host = host,
+        proxy_port = port,
+        proxy_user = username or None,
+        proxy_pass = password or None,
+    )
+
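+# Illustrative usage (the proxy URL is a placeholder):
+#
+#   pi = proxy_info_from_url('http://joe:s3cret@proxy.example.com:3128')
+#   pi.isgood()                      # True: host and port are both set
+#   pi.bypass_host('example.com')    # False unless listed in bypass_hosts
+#
+# proxy_info_from_environment() builds the same object from the http_proxy /
+# https_proxy variables and honours no_proxy, so Http() picks up a system
+# proxy without any explicit configuration.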
+
+class HTTPConnectionWithTimeout(httplib.HTTPConnection):
+    """
+    HTTPConnection subclass that supports timeouts
+
+    All timeouts are in seconds. If None is passed for timeout then
+    Python's default timeout for sockets will be used. See for example
+    the docs of socket.setdefaulttimeout():
+    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
+    """
+
+    def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
+        httplib.HTTPConnection.__init__(self, host, port, strict)
+        self.timeout = timeout
+        self.proxy_info = proxy_info
+
+    def connect(self):
+        """Connect to the host and port specified in __init__."""
+        # Mostly verbatim from httplib.py.
+        if self.proxy_info and socks is None:
+            raise ProxiesUnavailableError(
+                'Proxy support missing but proxy use was requested!')
+        msg = "getaddrinfo returns an empty list"
+        if self.proxy_info and self.proxy_info.isgood():
+            use_proxy = True
+            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
+        else:
+            use_proxy = False
+        if use_proxy and proxy_rdns:
+            host = proxy_host
+            port = proxy_port
+        else:
+            host = self.host
+            port = self.port
+
+        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
+            af, socktype, proto, canonname, sa = res
+            try:
+                if use_proxy:
+                    self.sock = socks.socksocket(af, socktype, proto)
+                    self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
+                else:
+                    self.sock = socket.socket(af, socktype, proto)
+                    self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+                # Different from httplib: support timeouts.
+                if has_timeout(self.timeout):
+                    self.sock.settimeout(self.timeout)
+                    # End of difference from httplib.
+                if self.debuglevel > 0:
+                    print "connect: (%s, %s) ************" % (self.host, self.port)
+                    if use_proxy:
+                        print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
+
+                self.sock.connect((self.host, self.port) + sa[2:])
+            except socket.error, msg:
+                if self.debuglevel > 0:
+                    print "connect fail: (%s, %s)" % (self.host, self.port)
+                    if use_proxy:
+                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
+                if self.sock:
+                    self.sock.close()
+                self.sock = None
+                continue
+            break
+        if not self.sock:
+            raise socket.error, msg
+
+class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
+    """
+    This class allows communication via SSL.
+
+    All timeouts are in seconds. If None is passed for timeout then
+    Python's default timeout for sockets will be used. See for example
+    the docs of socket.setdefaulttimeout():
+    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
+    """
+    def __init__(self, host, port=None, key_file=None, cert_file=None,
+                 strict=None, timeout=None, proxy_info=None,
+                 ca_certs=None, disable_ssl_certificate_validation=False):
+        httplib.HTTPSConnection.__init__(self, host, port=port,
+                                         key_file=key_file,
+                                         cert_file=cert_file, strict=strict)
+        self.timeout = timeout
+        self.proxy_info = proxy_info
+        if ca_certs is None:
+            ca_certs = CA_CERTS
+        self.ca_certs = ca_certs
+        self.disable_ssl_certificate_validation = \
+                disable_ssl_certificate_validation
+
+    # The following two methods were adapted from https_wrapper.py, released
+    # with the Google Appengine SDK at
+    # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py
+    # under the following license:
+    #
+    # Copyright 2007 Google Inc.
+    #
+    # Licensed under the Apache License, Version 2.0 (the "License");
+    # you may not use this file except in compliance with the License.
+    # You may obtain a copy of the License at
+    #
+    #     http://www.apache.org/licenses/LICENSE-2.0
+    #
+    # Unless required by applicable law or agreed to in writing, software
+    # distributed under the License is distributed on an "AS IS" BASIS,
+    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    # See the License for the specific language governing permissions and
+    # limitations under the License.
+    #
+
+    def _GetValidHostsForCert(self, cert):
+        """Returns a list of valid host globs for an SSL certificate.
+
+        Args:
+          cert: A dictionary representing an SSL certificate.
+        Returns:
+          list: A list of valid host globs.
+        """
+        if 'subjectAltName' in cert:
+            return [x[1] for x in cert['subjectAltName']
+                    if x[0].lower() == 'dns']
+        else:
+            return [x[0][1] for x in cert['subject']
+                    if x[0][0].lower() == 'commonname']
+
+    def _ValidateCertificateHostname(self, cert, hostname):
+        """Validates that a given hostname is valid for an SSL certificate.
+
+        Args:
+          cert: A dictionary representing an SSL certificate.
+          hostname: The hostname to test.
+        Returns:
+          bool: Whether or not the hostname is valid for this certificate.
+        """
+        hosts = self._GetValidHostsForCert(cert)
+        for host in hosts:
+            host_re = host.replace('.', r'\.').replace('*', '[^.]*')
+            if re.search('^%s$' % (host_re,), hostname, re.I):
+                return True
+        return False
+
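+    # Illustrative note: the 'cert' dictionaries handled above come from
+    # SSLSocket.getpeercert() and look roughly like
+    #   {'subject': ((('commonName', 'www.example.com'),),),
+    #    'subjectAltName': (('DNS', 'www.example.com'), ('DNS', 'example.com'))}
+    # so subjectAltName entries win, with commonName as the fallback.
+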
+    def connect(self):
+        "Connect to a host on a given (SSL) port."
+
+        msg = "getaddrinfo returns an empty list"
+        if self.proxy_info and self.proxy_info.isgood():
+            use_proxy = True
+            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
+        else:
+            use_proxy = False
+        if use_proxy and proxy_rdns:
+            host = proxy_host
+            port = proxy_port
+        else:
+            host = self.host
+            port = self.port
+
+        address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
+        for family, socktype, proto, canonname, sockaddr in address_info:
+            try:
+                if use_proxy:
+                    sock = socks.socksocket(family, socktype, proto)
+
+                    sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
+                else:
+                    sock = socket.socket(family, socktype, proto)
+                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+
+                if has_timeout(self.timeout):
+                    sock.settimeout(self.timeout)
+                sock.connect((self.host, self.port))
+                self.sock = _ssl_wrap_socket(
+                    sock, self.key_file, self.cert_file,
+                    self.disable_ssl_certificate_validation, self.ca_certs)
+                if self.debuglevel > 0:
+                    print "connect: (%s, %s)" % (self.host, self.port)
+                    if use_proxy:
+                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
+                if not self.disable_ssl_certificate_validation:
+                    cert = self.sock.getpeercert()
+                    hostname = self.host.split(':')[0]
+                    if not self._ValidateCertificateHostname(cert, hostname):
+                        raise CertificateHostnameMismatch(
+                            'Server presented certificate that does not match '
+                            'host %s: %s' % (hostname, cert), hostname, cert)
+            except ssl_SSLError, e:
+                if sock:
+                    sock.close()
+                if self.sock:
+                    self.sock.close()
+                self.sock = None
+                # Unfortunately the ssl module doesn't seem to provide any way
+                # to get at more detailed error information, in particular
+                # whether the error is due to certificate validation or
+                # something else (such as SSL protocol mismatch).
+                if e.errno == ssl.SSL_ERROR_SSL:
+                    raise SSLHandshakeError(e)
+                else:
+                    raise
+            except (socket.timeout, socket.gaierror):
+                raise
+            except socket.error, msg:
+                if self.debuglevel > 0:
+                    print "connect fail: (%s, %s)" % (self.host, self.port)
+                    if use_proxy:
+                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
+                if self.sock:
+                    self.sock.close()
+                self.sock = None
+                continue
+            break
+        if not self.sock:
+            raise socket.error, msg
+
+SCHEME_TO_CONNECTION = {
+    'http': HTTPConnectionWithTimeout,
+    'https': HTTPSConnectionWithTimeout
+}
+
+# Use a different connection object for Google App Engine
+try:
+    try:
+        from google.appengine.api import apiproxy_stub_map
+        if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
+            raise ImportError  # Bail out; we're not actually running on App Engine.
+        from google.appengine.api.urlfetch import fetch
+        from google.appengine.api.urlfetch import InvalidURLError
+    except (ImportError, AttributeError):
+        from google3.apphosting.api import apiproxy_stub_map
+        if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
+            raise ImportError  # Bail out; we're not actually running on App Engine.
+        from google3.apphosting.api.urlfetch import fetch
+        from google3.apphosting.api.urlfetch import InvalidURLError
+
+    def _new_fixed_fetch(validate_certificate):
+        def fixed_fetch(url, payload=None, method="GET", headers={},
+                        allow_truncated=False, follow_redirects=True,
+                        deadline=5):
+            return fetch(url, payload=payload, method=method, headers=headers,
+                         allow_truncated=allow_truncated,
+                         follow_redirects=follow_redirects, deadline=deadline,
+                         validate_certificate=validate_certificate)
+        return fixed_fetch
+
+    class AppEngineHttpConnection(httplib.HTTPConnection):
+        """Use httplib on App Engine, but compensate for its weirdness.
+
+        The parameters key_file, cert_file, proxy_info, ca_certs, and
+        disable_ssl_certificate_validation are all dropped on the ground.
+        """
+        def __init__(self, host, port=None, key_file=None, cert_file=None,
+                     strict=None, timeout=None, proxy_info=None, ca_certs=None,
+                     disable_ssl_certificate_validation=False):
+            httplib.HTTPConnection.__init__(self, host, port=port,
+                                            strict=strict, timeout=timeout)
+
+    class AppEngineHttpsConnection(httplib.HTTPSConnection):
+        """Same as AppEngineHttpConnection, but for HTTPS URIs."""
+        def __init__(self, host, port=None, key_file=None, cert_file=None,
+                     strict=None, timeout=None, proxy_info=None, ca_certs=None,
+                     disable_ssl_certificate_validation=False):
+            httplib.HTTPSConnection.__init__(self, host, port=port,
+                                             key_file=key_file,
+                                             cert_file=cert_file, strict=strict,
+                                             timeout=timeout)
+            self._fetch = _new_fixed_fetch(
+                    not disable_ssl_certificate_validation)
+
+    # Update the connection classes to use the Google App Engine specific ones.
+    SCHEME_TO_CONNECTION = {
+        'http': AppEngineHttpConnection,
+        'https': AppEngineHttpsConnection
+    }
+except (ImportError, AttributeError):
+    pass
+
+
+class Http(object):
+    """An HTTP client that handles:
+
+    - all methods
+    - caching
+    - ETags
+    - compression
+    - HTTPS
+    - Basic
+    - Digest
+    - WSSE
+
+    and more.
+    """
+    def __init__(self, cache=None, timeout=None,
+                 proxy_info=proxy_info_from_environment,
+                 ca_certs=None, disable_ssl_certificate_validation=False):
+        """If 'cache' is a string then it is used as a directory name for
+        a disk cache. Otherwise it must be an object that supports the
+        same interface as FileCache.
+
+        All timeouts are in seconds. If None is passed for timeout
+        then Python's default timeout for sockets will be used. See
+        for example the docs of socket.setdefaulttimeout():
+        http://docs.python.org/library/socket.html#socket.setdefaulttimeout
+
+        `proxy_info` may be:
+          - a callable that takes the http scheme ('http' or 'https') and
+            returns a ProxyInfo instance per request. By default, uses
+            proxy_info_from_environment.
+          - a ProxyInfo instance (static proxy config).
+          - None (proxy disabled).
+
+        ca_certs is the path of a file containing root CA certificates for SSL
+        server certificate validation.  By default, a CA cert file bundled with
+        httplib2 is used.
+
+        If disable_ssl_certificate_validation is true, SSL cert validation will
+        not be performed.
+        """
+        self.proxy_info = proxy_info
+        self.ca_certs = ca_certs
+        self.disable_ssl_certificate_validation = \
+                disable_ssl_certificate_validation
+
+        # Map domain name to an httplib connection
+        self.connections = {}
+        # The location of the cache, for now a directory
+        # where cached responses are held.
+        if cache and isinstance(cache, basestring):
+            self.cache = FileCache(cache)
+        else:
+            self.cache = cache
+
+        # Name/password
+        self.credentials = Credentials()
+
+        # Key/cert
+        self.certificates = KeyCerts()
+
+        # authorization objects
+        self.authorizations = []
+
+        # If set to False then no redirects are followed, even safe ones.
+        self.follow_redirects = True
+
+        # Which HTTP methods do we apply optimistic concurrency to, i.e.
+        # which methods get an "if-match:" etag header added to them.
+        self.optimistic_concurrency_methods = ["PUT", "PATCH"]
+
+        # If 'follow_redirects' is True, and this is set to True then
+        # all redirects are followed, including unsafe ones.
+        self.follow_all_redirects = False
+
+        self.ignore_etag = False
+
+        self.force_exception_to_status_code = False
+
+        self.timeout = timeout
+
+        # Keep Authorization: headers on a redirect.
+        self.forward_authorization_headers = False
+
+    def __getstate__(self):
+        state_dict = copy.copy(self.__dict__)
+        # In case request is augmented by some foreign object such as
+        # credentials which handle auth
+        if 'request' in state_dict:
+            del state_dict['request']
+        if 'connections' in state_dict:
+            del state_dict['connections']
+        return state_dict
+
+    def __setstate__(self, state):
+        self.__dict__.update(state)
+        self.connections = {}
+
+    def _auth_from_challenge(self, host, request_uri, headers, response, content):
+        """A generator that creates Authorization objects
+           that can be applied to requests.
+        """
+        challenges = _parse_www_authenticate(response, 'www-authenticate')
+        for cred in self.credentials.iter(host):
+            for scheme in AUTH_SCHEME_ORDER:
+                if challenges.has_key(scheme):
+                    yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
+
+    def add_credentials(self, name, password, domain=""):
+        """Add a name and password that will be used
+        any time a request requires authentication."""
+        self.credentials.add(name, password, domain)
+
+    def add_certificate(self, key, cert, domain):
+        """Add a key and cert that will be used
+        any time a request requires authentication."""
+        self.certificates.add(key, cert, domain)
+
+    def clear_credentials(self):
+        """Remove all the names and passwords
+        that are used for authentication"""
+        self.credentials.clear()
+        self.authorizations = []
+
+    def _conn_request(self, conn, request_uri, method, body, headers):
+        i = 0
+        seen_bad_status_line = False
+        while i < RETRIES:
+            i += 1
+            try:
+                if hasattr(conn, 'sock') and conn.sock is None:
+                    conn.connect()
+                conn.request(method, request_uri, body, headers)
+            except socket.timeout:
+                raise
+            except socket.gaierror:
+                conn.close()
+                raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
+            except ssl_SSLError:
+                conn.close()
+                raise
+            except socket.error, e:
+                err = 0
+                if hasattr(e, 'args'):
+                    err = getattr(e, 'args')[0]
+                else:
+                    err = e.errno
+                if err == errno.ECONNREFUSED: # Connection refused
+                    raise
+            except httplib.HTTPException:
+                # Just because the server closed the connection doesn't apparently mean
+                # that the server didn't send a response.
+                if hasattr(conn, 'sock') and conn.sock is None:
+                    if i < RETRIES-1:
+                        conn.close()
+                        conn.connect()
+                        continue
+                    else:
+                        conn.close()
+                        raise
+                if i < RETRIES-1:
+                    conn.close()
+                    conn.connect()
+                    continue
+            try:
+                response = conn.getresponse()
+            except httplib.BadStatusLine:
+                # If we get a BadStatusLine on the first try then that means
+                # the connection just went stale, so retry regardless of the
+                # number of RETRIES set.
+                if not seen_bad_status_line and i == 1:
+                    i = 0
+                    seen_bad_status_line = True
+                    conn.close()
+                    conn.connect()
+                    continue
+                else:
+                    conn.close()
+                    raise
+            except (socket.error, httplib.HTTPException):
+                if i < RETRIES-1:
+                    conn.close()
+                    conn.connect()
+                    continue
+                else:
+                    conn.close()
+                    raise
+            else:
+                content = ""
+                if method == "HEAD":
+                    conn.close()
+                else:
+                    content = response.read()
+                response = Response(response)
+                if method != "HEAD":
+                    content = _decompressContent(response, content)
+            break
+        return (response, content)
+
+
+    def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
+        """Do the actual request using the connection object
+        and also follow one level of redirects if necessary"""
+
+        auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
+        auth = auths and sorted(auths)[0][1] or None
+        if auth:
+            auth.request(method, request_uri, headers, body)
+
+        (response, content) = self._conn_request(conn, request_uri, method, body, headers)
+
+        if auth:
+            if auth.response(response, body):
+                auth.request(method, request_uri, headers, body)
+                (response, content) = self._conn_request(conn, request_uri, method, body, headers)
+                response._stale_digest = 1
+
+        if response.status == 401:
+            for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
+                authorization.request(method, request_uri, headers, body)
+                (response, content) = self._conn_request(conn, request_uri, method, body, headers)
+                if response.status != 401:
+                    self.authorizations.append(authorization)
+                    authorization.response(response, body)
+                    break
+
+        if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
+            if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
+                # Pick out the location header and basically start from the beginning
+                # remembering first to strip the ETag header and decrement our 'depth'
+                if redirections:
+                    if not response.has_key('location') and response.status != 300:
+                        raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
+                    # Fix-up relative redirects (which violate an RFC 2616 MUST)
+                    if response.has_key('location'):
+                        location = response['location']
+                        (scheme, authority, path, query, fragment) = parse_uri(location)
+                        if authority == None:
+                            response['location'] = urlparse.urljoin(absolute_uri, location)
+                    if response.status == 301 and method in ["GET", "HEAD"]:
+                        response['-x-permanent-redirect-url'] = response['location']
+                        if not response.has_key('content-location'):
+                            response['content-location'] = absolute_uri
+                        _updateCache(headers, response, content, self.cache, cachekey)
+                    if headers.has_key('if-none-match'):
+                        del headers['if-none-match']
+                    if headers.has_key('if-modified-since'):
+                        del headers['if-modified-since']
+                    if 'authorization' in headers and not self.forward_authorization_headers:
+                        del headers['authorization']
+                    if response.has_key('location'):
+                        location = response['location']
+                        old_response = copy.deepcopy(response)
+                        if not old_response.has_key('content-location'):
+                            old_response['content-location'] = absolute_uri
+                        redirect_method = method
+                        if response.status in [302, 303]:
+                            redirect_method = "GET"
+                            body = None
+                        (response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1)
+                        response.previous = old_response
+                else:
+                    raise RedirectLimit("Redirected more times than redirection_limit allows.", response, content)
+            elif response.status in [200, 203] and method in ["GET", "HEAD"]:
+                # Don't cache 206's since we aren't going to handle byte range requests
+                if not response.has_key('content-location'):
+                    response['content-location'] = absolute_uri
+                _updateCache(headers, response, content, self.cache, cachekey)
+
+        return (response, content)
+
+    def _normalize_headers(self, headers):
+        return _normalize_headers(headers)
+
+# Need to catch and rebrand some exceptions
+# Then need to optionally turn all exceptions into status codes
+# including all socket.* and httplib.* exceptions.
+
+
+    def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
+        """ Performs a single HTTP request.
+
+        The 'uri' is the URI of the HTTP resource and can begin with either
+        'http' or 'https'. The value of 'uri' must be an absolute URI.
+
+        The 'method' is the HTTP method to perform, such as GET, POST, DELETE,
+        etc. There is no restriction on the methods allowed.
+
+        The 'body' is the entity body to be sent with the request. It is a
+        string object.
+
+        Any extra headers that are to be sent with the request should be
+        provided in the 'headers' dictionary.
+
+        The maximum number of redirects to follow before raising an
+        exception is 'redirections'. The default is 5.
+
+        The return value is a tuple of (response, content), the first
+        being an instance of the 'Response' class, the second being
+        a string that contains the response entity body.
+        """
+        try:
+            if headers is None:
+                headers = {}
+            else:
+                headers = self._normalize_headers(headers)
+
+            if not headers.has_key('user-agent'):
+                headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
+
+            uri = iri2uri(uri)
+
+            (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
+            domain_port = authority.split(":")[0:2]
+            if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
+                scheme = 'https'
+                authority = domain_port[0]
+
+            proxy_info = self._get_proxy_info(scheme, authority)
+
+            conn_key = scheme+":"+authority
+            if conn_key in self.connections:
+                conn = self.connections[conn_key]
+            else:
+                if not connection_type:
+                    connection_type = SCHEME_TO_CONNECTION[scheme]
+                certs = list(self.certificates.iter(authority))
+                if scheme == 'https':
+                    if certs:
+                        conn = self.connections[conn_key] = connection_type(
+                                authority, key_file=certs[0][0],
+                                cert_file=certs[0][1], timeout=self.timeout,
+                                proxy_info=proxy_info,
+                                ca_certs=self.ca_certs,
+                                disable_ssl_certificate_validation=
+                                        self.disable_ssl_certificate_validation)
+                    else:
+                        conn = self.connections[conn_key] = connection_type(
+                                authority, timeout=self.timeout,
+                                proxy_info=proxy_info,
+                                ca_certs=self.ca_certs,
+                                disable_ssl_certificate_validation=
+                                        self.disable_ssl_certificate_validation)
+                else:
+                    conn = self.connections[conn_key] = connection_type(
+                            authority, timeout=self.timeout,
+                            proxy_info=proxy_info)
+                conn.set_debuglevel(debuglevel)
+
+            if 'range' not in headers and 'accept-encoding' not in headers:
+                headers['accept-encoding'] = 'gzip, deflate'
+
+            info = email.Message.Message()
+            cached_value = None
+            if self.cache:
+                cachekey = defrag_uri
+                cached_value = self.cache.get(cachekey)
+                if cached_value:
+                    # info = email.message_from_string(cached_value)
+                    #
+                    # Need to replace the line above with the kludge below
+                    # to fix the non-existent bug not fixed in this
+                    # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html
+                    try:
+                        info, content = cached_value.split('\r\n\r\n', 1)
+                        feedparser = email.FeedParser.FeedParser()
+                        feedparser.feed(info)
+                        info = feedparser.close()
+                        feedparser._parse = None
+                    except (IndexError, ValueError):
+                        self.cache.delete(cachekey)
+                        cachekey = None
+                        cached_value = None
+            else:
+                cachekey = None
+
+            if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
+                # http://www.w3.org/1999/04/Editing/
+                headers['if-match'] = info['etag']
+
+            if method not in ["GET", "HEAD"] and self.cache and cachekey:
+                # RFC 2616 Section 13.10
+                self.cache.delete(cachekey)
+
+            # Check the vary header in the cache to see if this request
+            # matches what varies in the cache.
+            if method in ['GET', 'HEAD'] and 'vary' in info:
+                vary = info['vary']
+                vary_headers = vary.lower().replace(' ', '').split(',')
+                for header in vary_headers:
+                    key = '-varied-%s' % header
+                    value = info[key]
+                    if headers.get(header, None) != value:
+                        cached_value = None
+                        break
+
+            if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
+                if info.has_key('-x-permanent-redirect-url'):
+                    # Should cached permanent redirects be counted in our redirection count? For now, yes.
+                    if redirections <= 0:
+                        raise RedirectLimit("Redirected more times than redirection_limit allows.", {}, "")
+                    (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1)
+                    response.previous = Response(info)
+                    response.previous.fromcache = True
+                else:
+                    # Determine our course of action:
+                    #   Is the cached entry fresh or stale?
+                    #   Has the client requested a non-cached response?
+                    #
+                    # There seems to be three possible answers:
+                    # 1. [FRESH] Return the cache entry w/o doing a GET
+                    # 2. [STALE] Do the GET (but add in cache validators if available)
+                    # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
+                    entry_disposition = _entry_disposition(info, headers)
+
+                    if entry_disposition == "FRESH":
+                        if not cached_value:
+                            info['status'] = '504'
+                            content = ""
+                        response = Response(info)
+                        if cached_value:
+                            response.fromcache = True
+                        return (response, content)
+
+                    if entry_disposition == "STALE":
+                        if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
+                            headers['if-none-match'] = info['etag']
+                        if info.has_key('last-modified') and not 'last-modified' in headers:
+                            headers['if-modified-since'] = info['last-modified']
+                    elif entry_disposition == "TRANSPARENT":
+                        pass
+
+                    (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
+
+                if response.status == 304 and method == "GET":
+                    # Rewrite the cache entry with the new end-to-end headers
+                    # Take all headers that are in response
+                    # and overwrite their values in info.
+                    # unless they are hop-by-hop, or are listed in the connection header.
+
+                    for key in _get_end2end_headers(response):
+                        info[key] = response[key]
+                    merged_response = Response(info)
+                    if hasattr(response, "_stale_digest"):
+                        merged_response._stale_digest = response._stale_digest
+                    _updateCache(headers, merged_response, content, self.cache, cachekey)
+                    response = merged_response
+                    response.status = 200
+                    response.fromcache = True
+
+                elif response.status == 200:
+                    content = new_content
+                else:
+                    self.cache.delete(cachekey)
+                    content = new_content
+            else:
+                cc = _parse_cache_control(headers)
+                if cc.has_key('only-if-cached'):
+                    info['status'] = '504'
+                    response = Response(info)
+                    content = ""
+                else:
+                    (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
+        except Exception, e:
+            if self.force_exception_to_status_code:
+                if isinstance(e, HttpLib2ErrorWithResponse):
+                    response = e.response
+                    content = e.content
+                    response.status = 500
+                    response.reason = str(e)
+                elif isinstance(e, socket.timeout):
+                    content = "Request Timeout"
+                    response = Response({
+                        "content-type": "text/plain",
+                        "status": "408",
+                        "content-length": len(content)
+                    })
+                    response.reason = "Request Timeout"
+                else:
+                    content = str(e)
+                    response = Response({
+                        "content-type": "text/plain",
+                        "status": "400",
+                        "content-length": len(content)
+                    })
+                    response.reason = "Bad Request"
+            else:
+                raise
+
+
+        return (response, content)
+
+    def _get_proxy_info(self, scheme, authority):
+        """Return a ProxyInfo instance (or None) based on the scheme
+        and authority.
+        """
+        hostname, port = urllib.splitport(authority)
+        proxy_info = self.proxy_info
+        if callable(proxy_info):
+            proxy_info = proxy_info(scheme)
+
+        if (hasattr(proxy_info, 'applies_to')
+            and not proxy_info.applies_to(hostname)):
+            proxy_info = None
+        return proxy_info
+
+
+class Response(dict):
+    """An object more like email.Message than httplib.HTTPResponse."""
+
+    """Is this response from our local cache"""
+    fromcache = False
+
+    """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """
+    version = 11
+
+    "Status code returned by server. "
+    status = 200
+
+    """Reason phrase returned by server."""
+    reason = "Ok"
+
+    previous = None
+
+    def __init__(self, info):
+        # info is either an email.Message or
+        # an httplib.HTTPResponse object.
+        if isinstance(info, httplib.HTTPResponse):
+            for key, value in info.getheaders():
+                self[key.lower()] = value
+            self.status = info.status
+            self['status'] = str(self.status)
+            self.reason = info.reason
+            self.version = info.version
+        elif isinstance(info, email.Message.Message):
+            for key, value in info.items():
+                self[key.lower()] = value
+            self.status = int(self['status'])
+        else:
+            for key, value in info.iteritems():
+                self[key.lower()] = value
+            self.status = int(self.get('status', self.status))
+            self.reason = self.get('reason', self.reason)
+
+
+    def __getattr__(self, name):
+        if name == 'dict':
+            return self
+        else:
+            raise AttributeError, name
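+
+
+# A minimal usage sketch (illustrative only; the URL is a placeholder):
+#
+#   h = Http(".cache")
+#   h.add_credentials("name", "password")
+#   (resp, content) = h.request("http://example.org/", "GET")
+#   print resp.status, resp["content-type"]
+#
+# 'resp' is a Response: a dict of lower-cased response headers that also
+# carries .status, .reason, .version and .fromcache; 'content' is the
+# entity body as a string.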
diff --git a/httplib2/ca_certs_locater.py b/httplib2/ca_certs_locater.py
new file mode 100644
index 0000000..e4946aa
--- /dev/null
+++ b/httplib2/ca_certs_locater.py
@@ -0,0 +1,32 @@
+"""Custom locater for CA_CERTS files for google3 code."""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import os
+
+from google3.pyglib import resources
+
+
+# pylint: disable-msg=g-bad-name
+def get():
+  """Locate the ca_certs.txt file.
+
+  The httplib2 library will look for a local ca_certs_locater module to
+  override the default location for the ca_certs.txt file. We override it here
+  to first try loading via pyglib.resources, falling back to the traditional
+  method if that fails.
+
+  Returns:
+    The file location returned as a string.
+  """
+  try:
+    ca_certs = resources.GetResourceFilename(
+        'google3/third_party/py/httplib2/cacerts.txt')
+  except (IOError, AttributeError):
+    # We're either running in an environment where we don't have access to
+    # google3.pyglib.resources, or an environment where it won't work correctly
+    # (e.g., //apphosting/tools:dev_appserver_internal_main). In either of these
+    # cases, we fall back on the os.path.join approach.
+    ca_certs = os.path.join(
+        os.path.dirname(os.path.abspath(__file__)), 'cacerts.txt')
+  return ca_certs
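+
+
+# Illustrative only: httplib2 consumes this module along these lines at import
+# time, falling back to its bundled cacerts.txt when the import fails:
+#
+#   try:
+#     import ca_certs_locater
+#     CA_CERTS = ca_certs_locater.get()
+#   except ImportError:
+#     CA_CERTS = os.path.join(
+#         os.path.dirname(os.path.abspath(__file__)), 'cacerts.txt')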
diff --git a/httplib2/cacerts.txt b/httplib2/cacerts.txt
new file mode 100644
index 0000000..d8a0027
--- /dev/null
+++ b/httplib2/cacerts.txt
@@ -0,0 +1,739 @@
+# Certificate Authority certificates for validating SSL connections.
+#
+# This file contains PEM format certificates generated from
+# http://mxr.mozilla.org/seamonkey/source/security/nss/lib/ckfw/builtins/certdata.txt
+#
+# ***** BEGIN LICENSE BLOCK *****
+# Version: MPL 1.1/GPL 2.0/LGPL 2.1
+#
+# The contents of this file are subject to the Mozilla Public License Version
+# 1.1 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.mozilla.org/MPL/
+#
+# Software distributed under the License is distributed on an "AS IS" basis,
+# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+# for the specific language governing rights and limitations under the
+# License.
+#
+# The Original Code is the Netscape security libraries.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1994-2000
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#
+# Alternatively, the contents of this file may be used under the terms of
+# either the GNU General Public License Version 2 or later (the "GPL"), or
+# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+# in which case the provisions of the GPL or the LGPL are applicable instead
+# of those above. If you wish to allow use of your version of this file only
+# under the terms of either the GPL or the LGPL, and not to allow others to
+# use your version of this file under the terms of the MPL, indicate your
+# decision by deleting the provisions above and replace them with the notice
+# and other provisions required by the GPL or the LGPL. If you do not delete
+# the provisions above, a recipient may use your version of this file under
+# the terms of any one of the MPL, the GPL or the LGPL.
+#
+# ***** END LICENSE BLOCK *****
+
+Verisign/RSA Secure Server CA
+=============================
+
+-----BEGIN CERTIFICATE-----
+MIICNDCCAaECEAKtZn5ORf5eV288mBle3cAwDQYJKoZIhvcNAQECBQAwXzELMAkG
+A1UEBhMCVVMxIDAeBgNVBAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYD
+VQQLEyVTZWN1cmUgU2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk0
+MTEwOTAwMDAwMFoXDTEwMDEwNzIzNTk1OVowXzELMAkGA1UEBhMCVVMxIDAeBgNV
+BAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYDVQQLEyVTZWN1cmUgU2Vy
+dmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGbMA0GCSqGSIb3DQEBAQUAA4GJ
+ADCBhQJ+AJLOesGugz5aqomDV6wlAXYMra6OLDfO6zV4ZFQD5YRAUcm/jwjiioII
+0haGN1XpsSECrXZogZoFokvJSyVmIlZsiAeP94FZbYQHZXATcXY+m3dM41CJVphI
+uR2nKRoTLkoRWZweFdVJVCxzOmmCsZc5nG1wZ0jl3S3WyB57AgMBAAEwDQYJKoZI
+hvcNAQECBQADfgBl3X7hsuyw4jrg7HFGmhkRuNPHoLQDQCYCPgmc4RKz0Vr2N6W3
+YQO2WxZpO8ZECAyIUwxrl0nHPjXcbLm7qt9cuzovk2C2qUtN8iD3zV9/ZHuO3ABc
+1/p3yjkWWW8O6tO1g39NTUJWdrTJXwT4OPjr0l91X817/OWOgHz8UA==
+-----END CERTIFICATE-----
+
+Thawte Personal Basic CA
+========================
+
+-----BEGIN CERTIFICATE-----
+MIIDITCCAoqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCByzELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD
+VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT
+ZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFBlcnNvbmFsIEJhc2lj
+IENBMSgwJgYJKoZIhvcNAQkBFhlwZXJzb25hbC1iYXNpY0B0aGF3dGUuY29tMB4X
+DTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgcsxCzAJBgNVBAYTAlpBMRUw
+EwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEaMBgGA1UE
+ChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRpb24gU2Vy
+dmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQZXJzb25hbCBCYXNpYyBD
+QTEoMCYGCSqGSIb3DQEJARYZcGVyc29uYWwtYmFzaWNAdGhhd3RlLmNvbTCBnzAN
+BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAvLyTU23AUE+CFeZIlDWmWr5vQvoPR+53
+dXLdjUmbllegeNTKP1GzaQuRdhciB5dqxFGTS+CN7zeVoQxN2jSQHReJl+A1OFdK
+wPQIcOk8RHtQfmGakOMj04gRRif1CwcOu93RfyAKiLlWCy4cgNrx454p7xS9CkT7
+G1sY0b8jkyECAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQQF
+AAOBgQAt4plrsD16iddZopQBHyvdEktTwq1/qqcAXJFAVyVKOKqEcLnZgA+le1z7
+c8a914phXAPjLSeoF+CEhULcXpvGt7Jtu3Sv5D/Lp7ew4F2+eIMllNLbgQ95B21P
+9DkVWlIBe94y1k049hJcBlDfBVu9FEuh3ym6O0GN92NWod8isQ==
+-----END CERTIFICATE-----
+
+Thawte Personal Premium CA
+==========================
+
+-----BEGIN CERTIFICATE-----
+MIIDKTCCApKgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBzzELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD
+VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT
+ZXJ2aWNlcyBEaXZpc2lvbjEjMCEGA1UEAxMaVGhhd3RlIFBlcnNvbmFsIFByZW1p
+dW0gQ0ExKjAoBgkqhkiG9w0BCQEWG3BlcnNvbmFsLXByZW1pdW1AdGhhd3RlLmNv
+bTAeFw05NjAxMDEwMDAwMDBaFw0yMDEyMzEyMzU5NTlaMIHPMQswCQYDVQQGEwJa
+QTEVMBMGA1UECBMMV2VzdGVybiBDYXBlMRIwEAYDVQQHEwlDYXBlIFRvd24xGjAY
+BgNVBAoTEVRoYXd0ZSBDb25zdWx0aW5nMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9u
+IFNlcnZpY2VzIERpdmlzaW9uMSMwIQYDVQQDExpUaGF3dGUgUGVyc29uYWwgUHJl
+bWl1bSBDQTEqMCgGCSqGSIb3DQEJARYbcGVyc29uYWwtcHJlbWl1bUB0aGF3dGUu
+Y29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDJZtn4B0TPuYwu8KHvE0Vs
+Bd/eJxZRNkERbGw77f4QfRKe5ZtCmv5gMcNmt3M6SK5O0DI3lIi1DbbZ8/JE2dWI
+Et12TfIa/G8jHnrx2JhFTgcQ7xZC0EN1bUre4qrJMf8fAHB8Zs8QJQi6+u4A6UYD
+ZicRFTuqW/KY3TZCstqIdQIDAQABoxMwETAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
+SIb3DQEBBAUAA4GBAGk2ifc0KjNyL2071CKyuG+axTZmDhs8obF1Wub9NdP4qPIH
+b4Vnjt4rueIXsDqg8A6iAJrf8xQVbrvIhVqYgPn/vnQdPfP+MCXRNzRn+qVxeTBh
+KXLA4CxM+1bkOqhv5TJZUtt1KFBZDPgLGeSs2a+WjS9Q2wfD6h+rM+D1KzGJ
+-----END CERTIFICATE-----
+
+Thawte Personal Freemail CA
+===========================
+
+-----BEGIN CERTIFICATE-----
+MIIDLTCCApagAwIBAgIBADANBgkqhkiG9w0BAQQFADCB0TELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD
+VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT
+ZXJ2aWNlcyBEaXZpc2lvbjEkMCIGA1UEAxMbVGhhd3RlIFBlcnNvbmFsIEZyZWVt
+YWlsIENBMSswKQYJKoZIhvcNAQkBFhxwZXJzb25hbC1mcmVlbWFpbEB0aGF3dGUu
+Y29tMB4XDTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgdExCzAJBgNVBAYT
+AlpBMRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEa
+MBgGA1UEChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRp
+b24gU2VydmljZXMgRGl2aXNpb24xJDAiBgNVBAMTG1RoYXd0ZSBQZXJzb25hbCBG
+cmVlbWFpbCBDQTErMCkGCSqGSIb3DQEJARYccGVyc29uYWwtZnJlZW1haWxAdGhh
+d3RlLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA1GnX1LCUZFtx6UfY
+DFG26nKRsIRefS0Nj3sS34UldSh0OkIsYyeflXtL734Zhx2G6qPduc6WZBrCFG5E
+rHzmj+hND3EfQDimAKOHePb5lIZererAXnbr2RSjXW56fAylS1V/Bhkpf56aJtVq
+uzgkCGqYx7Hao5iR/Xnb5VrEHLkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zAN
+BgkqhkiG9w0BAQQFAAOBgQDH7JJ+Tvj1lqVnYiqk8E0RYNBvjWBYYawmu1I1XAjP
+MPuoSpaKH2JCI4wXD/S6ZJwXrEcp352YXtJsYHFcoqzceePnbgBHH7UNKOgCneSa
+/RP0ptl8sfjcXyMmCZGAc9AUG95DqYMl8uacLxXK/qarigd1iwzdUYRr5PjRznei
+gQ==
+-----END CERTIFICATE-----
+
+Thawte Server CA
+================
+
+-----BEGIN CERTIFICATE-----
+MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
+MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
+MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
+DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
+dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
+cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
+DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
+gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
+yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
+L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
+EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
+7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
+QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
+qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
+-----END CERTIFICATE-----
+
+Thawte Premium Server CA
+========================
+
+-----BEGIN CERTIFICATE-----
+MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
+dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
+MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
+MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
+A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
+b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
+cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
+bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
+VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
+ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
+uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
+9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
+hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
+pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
+-----END CERTIFICATE-----
+
+Equifax Secure CA
+=================
+
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
+UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
+dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1
+MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx
+dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f
+BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A
+cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC
+AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ
+MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm
+aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw
+ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj
+IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF
+MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
+A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
+7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh
+1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
+-----END CERTIFICATE-----
+
+Verisign Class 1 Public Primary Certification Authority
+=======================================================
+
+-----BEGIN CERTIFICATE-----
+MIICPTCCAaYCEQDNun9W8N/kvFT+IqyzcqpVMA0GCSqGSIb3DQEBAgUAMF8xCzAJ
+BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xh
+c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05
+NjAxMjkwMDAwMDBaFw0yODA4MDEyMzU5NTlaMF8xCzAJBgNVBAYTAlVTMRcwFQYD
+VQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xhc3MgMSBQdWJsaWMgUHJp
+bWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCBnzANBgkqhkiG9w0BAQEFAAOB
+jQAwgYkCgYEA5Rm/baNWYS2ZSHH2Z965jeu3noaACpEO+jglr0aIguVzqKCbJF0N
+H8xlbgyw0FaEGIeaBpsQoXPftFg5a27B9hXVqKg/qhIGjTGsf7A01480Z4gJzRQR
+4k5FVmkfeAKA2txHkSm7NsljXMXg1y2He6G3MrB7MLoqLzGq7qNn2tsCAwEAATAN
+BgkqhkiG9w0BAQIFAAOBgQBMP7iLxmjf7kMzDl3ppssHhE16M/+SG/Q2rdiVIjZo
+EWx8QszznC7EBz8UsA9P/5CSdvnivErpj82ggAr3xSnxgiJduLHdgSOjeyUVRjB5
+FvjqBUuUfx3CHMjjt/QQQDwTw18fU+hI5Ia0e6E1sHslurjTjqs/OJ0ANACY89Fx
+lA==
+-----END CERTIFICATE-----
+
+Verisign Class 2 Public Primary Certification Authority
+=======================================================
+
+-----BEGIN CERTIFICATE-----
+MIICPDCCAaUCEC0b/EoXjaOR6+f/9YtFvgswDQYJKoZIhvcNAQECBQAwXzELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
+cyAyIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
+MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
+BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAyIFB1YmxpYyBQcmlt
+YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
+ADCBiQKBgQC2WoujDWojg4BrzzmH9CETMwZMJaLtVRKXxaeAufqDwSCg+i8VDXyh
+YGt+eSz6Bg86rvYbb7HS/y8oUl+DfUvEerf4Zh+AVPy3wo5ZShRXRtGak75BkQO7
+FYCTXOvnzAhsPz6zSvz/S2wj1VCCJkQZjiPDceoZJEcEnnW/yKYAHwIDAQABMA0G
+CSqGSIb3DQEBAgUAA4GBAIobK/o5wXTXXtgZZKJYSi034DNHD6zt96rbHuSLBlxg
+J8pFUs4W7z8GZOeUaHxgMxURaa+dYo2jA1Rrpr7l7gUYYAS/QoD90KioHgE796Nc
+r6Pc5iaAIzy4RHT3Cq5Ji2F4zCS/iIqnDupzGUH9TQPwiNHleI2lKk/2lw0Xd8rY
+-----END CERTIFICATE-----
+
+Verisign Class 3 Public Primary Certification Authority
+=======================================================
+
+-----BEGIN CERTIFICATE-----
+MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
+cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
+MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
+BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
+YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
+ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
+BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
+I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
+CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do
+lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc
+AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k
+-----END CERTIFICATE-----
+
+Verisign Class 1 Public Primary Certification Authority - G2
+============================================================
+
+-----BEGIN CERTIFICATE-----
+MIIDAjCCAmsCEEzH6qqYPnHTkxD4PTqJkZIwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
+BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
+c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
+MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
+emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
+DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
+FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMSBQdWJsaWMg
+UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
+YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
+MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
+AQUAA4GNADCBiQKBgQCq0Lq+Fi24g9TK0g+8djHKlNgdk4xWArzZbxpvUjZudVYK
+VdPfQ4chEWWKfo+9Id5rMj8bhDSVBZ1BNeuS65bdqlk/AVNtmU/t5eIqWpDBucSm
+Fc/IReumXY6cPvBkJHalzasab7bYe1FhbqZ/h8jit+U03EGI6glAvnOSPWvndQID
+AQABMA0GCSqGSIb3DQEBBQUAA4GBAKlPww3HZ74sy9mozS11534Vnjty637rXC0J
+h9ZrbWB85a7FkCMMXErQr7Fd88e2CtvgFZMN3QO8x3aKtd1Pw5sTdbgBwObJW2ul
+uIncrKTdcu1OofdPvAbT6shkdHvClUGcZXNY8ZCaPGqxmMnEh7zPRW1F4m4iP/68
+DzFc6PLZ
+-----END CERTIFICATE-----
+
+Verisign Class 2 Public Primary Certification Authority - G2
+============================================================
+
+-----BEGIN CERTIFICATE-----
+MIIDAzCCAmwCEQC5L2DMiJ+hekYJuFtwbIqvMA0GCSqGSIb3DQEBBQUAMIHBMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0Ns
+YXNzIDIgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjE6MDgGA1UECxMxKGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9y
+aXplZCB1c2Ugb25seTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazAe
+Fw05ODA1MTgwMDAwMDBaFw0yODA4MDEyMzU5NTlaMIHBMQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0NsYXNzIDIgUHVibGlj
+IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMjE6MDgGA1UECxMx
+KGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEAp4gBIXQs5xoD8JjhlzwPIQjxnNuX6Zr8wgQGE75fUsjM
+HiwSViy4AWkszJkfrbCWrnkE8hM5wXuYuggs6MKEEyyqaekJ9MepAqRCwiNPStjw
+DqL7MWzJ5m+ZJwf15vRMeJ5t60aG+rmGyVTyssSv1EYcWskVMP8NbPUtDm3Of3cC
+AwEAATANBgkqhkiG9w0BAQUFAAOBgQByLvl/0fFx+8Se9sVeUYpAmLho+Jscg9ji
+nb3/7aHmZuovCfTK1+qlK5X2JGCGTUQug6XELaDTrnhpb3LabK4I8GOSN+a7xDAX
+rXfMSTWqz9iP0b63GJZHc2pUIjRkLbYWm1lbtFFZOrMLFPQS32eg9K0yZF6xRnIn
+jBJ7xUS0rg==
+-----END CERTIFICATE-----
+
+Verisign Class 3 Public Primary Certification Authority - G2
+============================================================
+
+-----BEGIN CERTIFICATE-----
+MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
+BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
+c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
+MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
+emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
+DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
+FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg
+UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
+YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
+MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
+AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4
+pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0
+13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID
+AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk
+U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i
+F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
+oJ2daZH9
+-----END CERTIFICATE-----
+
+Verisign Class 4 Public Primary Certification Authority - G2
+============================================================
+
+-----BEGIN CERTIFICATE-----
+MIIDAjCCAmsCEDKIjprS9esTR/h/xCA3JfgwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
+BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
+c3MgNCBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
+MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
+emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
+DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
+FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgNCBQdWJsaWMg
+UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
+YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
+MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
+AQUAA4GNADCBiQKBgQC68OTP+cSuhVS5B1f5j8V/aBH4xBewRNzjMHPVKmIquNDM
+HO0oW369atyzkSTKQWI8/AIBvxwWMZQFl3Zuoq29YRdsTjCG8FE3KlDHqGKB3FtK
+qsGgtG7rL+VXxbErQHDbWk2hjh+9Ax/YA9SPTJlxvOKCzFjomDqG04Y48wApHwID
+AQABMA0GCSqGSIb3DQEBBQUAA4GBAIWMEsGnuVAVess+rLhDityq3RS6iYF+ATwj
+cSGIL4LcY/oCRaxFWdcqWERbt5+BO5JoPeI3JPV7bI92NZYJqFmduc4jq3TWg/0y
+cyfYaT5DdPauxYma51N86Xv2S/PBZYPejYqcPIiNOVn8qj8ijaHBZlCBckztImRP
+T8qAkbYp
+-----END CERTIFICATE-----
+
+Verisign Class 1 Public Primary Certification Authority - G3
+============================================================
+
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCLW3VWhFSFCwDPrzhIzrGkMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAN2E1Lm0+afY8wR4
+nN493GwTFtl63SRRZsDHJlkNrAYIwpTRMx/wgzUfbhvI3qpuFU5UJ+/EbRrsC+MO
+8ESlV8dAWB6jRx9x7GD2bZTIGDnt/kIYVt/kTEkQeE4BdjVjEjbdZrwBBDajVWjV
+ojYJrKshJlQGrT/KFOCsyq0GHZXi+J3x4GD/wn91K0zM2v6HmSHquv4+VNfSWXjb
+PG7PoBMAGrgnoeS+Z5bKoMWznN3JdZ7rMJpfo83ZrngZPyPpXNspva1VyBtUjGP2
+6KbqxzcSXKMpHgLZ2x87tNcPVkeBFQRKr4Mn0cVYiMHd9qqnoxjaaKptEVHhv2Vr
+n5Z20T0CAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAq2aN17O6x5q25lXQBfGfMY1a
+qtmqRiYPce2lrVNWYgFHKkTp/j90CxObufRNG7LRX7K20ohcs5/Ny9Sn2WCVhDr4
+wTcdYcrnsMXlkdpUpqwxga6X3s0IrLjAl4B/bnKk52kTlWUfxJM8/XmPBNQ+T+r3
+ns7NZ3xPZQL/kYVUc8f/NveGLezQXk//EZ9yBta4GvFMDSZl4kSAHsef493oCtrs
+pSCAaWihT37ha88HQfqDjrw43bAuEbFrskLMmrz5SCJ5ShkPshw+IHTZasO+8ih4
+E1Z5T21Q6huwtVexN2ZYI/PcD98Kh8TvhgXVOBRgmaNL3gaWcSzy27YfpO8/7g==
+-----END CERTIFICATE-----
+
+Verisign Class 2 Public Primary Certification Authority - G3
+============================================================
+
+-----BEGIN CERTIFICATE-----
+MIIEGTCCAwECEGFwy0mMX5hFKeewptlQW3owDQYJKoZIhvcNAQEFBQAwgcoxCzAJ
+BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVy
+aVNpZ24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24s
+IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNp
+Z24gQ2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEczMB4XDTk5MTAwMTAwMDAwMFoXDTM2MDcxNjIzNTk1OVowgcoxCzAJBgNV
+BAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVyaVNp
+Z24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24sIElu
+Yy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNpZ24g
+Q2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAt
+IEczMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArwoNwtUs22e5LeWU
+J92lvuCwTY+zYVY81nzD9M0+hsuiiOLh2KRpxbXiv8GmR1BeRjmL1Za6tW8UvxDO
+JxOeBUebMXoT2B/Z0wI3i60sR/COgQanDTAM6/c8DyAd3HJG7qUCyFvDyVZpTMUY
+wZF7C9UTAJu878NIPkZgIIUq1ZC2zYugzDLdt/1AVbJQHFauzI13TccgTacxdu9o
+koqQHgiBVrKtaaNS0MscxCM9H5n+TOgWY47GCI72MfbS+uV23bUckqNJzc0BzWjN
+qWm6o+sdDZykIKbBoMXRRkwXbdKsZj+WjOCE1Db/IlnF+RFgqF8EffIa9iVCYQ/E
+Srg+iQIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQA0JhU8wI1NQ0kdvekhktdmnLfe
+xbjQ5F1fdiLAJvmEOjr5jLX77GDx6M4EsMjdpwOPMPOY36TmpDHf0xwLRtxyID+u
+7gU8pDM/CzmscHhzS5kr3zDCVLCoO1Wh/hYozUK9dG6A2ydEp85EXdQbkJgNHkKU
+sQAsBNB0owIFImNjzYO1+8FtYmtpdf1dcEG59b98377BMnMiIYtYgXsVkXq642RI
+sH/7NiXaldDxJBQX3RiAa0YjOVT1jmIJBB2UkKab5iXiQkWquJCtvgiPqQtCGJTP
+cjnhsUPgKM+351psE2tJs//jGHyJizNdrDPXp/naOlXJWBD5qu9ats9LS98q
+-----END CERTIFICATE-----
+
+Verisign Class 3 Public Primary Certification Authority - G3
+============================================================
+
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
+N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
+KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
+kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
+CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
+Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
+imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
+2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
+DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
+/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
+-----END CERTIFICATE-----
+
+Verisign Class 4 Public Primary Certification Authority - G3
+============================================================
+
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1
+GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ
++mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd
+U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm
+NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY
+ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/
+ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1
+CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq
+g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm
+fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
+2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/
+bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
+-----END CERTIFICATE-----
+
+Equifax Secure Global eBusiness CA
+==================================
+
+-----BEGIN CERTIFICATE-----
+MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
+ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
+MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
+dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
+c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
+UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
+58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
+o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
+aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
+A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
+Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
+8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
+-----END CERTIFICATE-----
+
+Equifax Secure eBusiness CA 1
+=============================
+
+-----BEGIN CERTIFICATE-----
+MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
+ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
+MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
+LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
+KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
+RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
+WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
+Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
+AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
+eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
+zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
+WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
+/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
+-----END CERTIFICATE-----
+
+Equifax Secure eBusiness CA 2
+=============================
+
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
+UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj
+dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0
+NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD
+VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G
+vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/
+BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C
+AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl
+IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw
+NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq
+y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF
+MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
+A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy
+0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1
+E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN
+-----END CERTIFICATE-----
+
+Thawte Time Stamping CA
+=======================
+
+-----BEGIN CERTIFICATE-----
+MIICoTCCAgqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBizELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzAN
+BgNVBAoTBlRoYXd0ZTEdMBsGA1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAd
+BgNVBAMTFlRoYXd0ZSBUaW1lc3RhbXBpbmcgQ0EwHhcNOTcwMTAxMDAwMDAwWhcN
+MjAxMjMxMjM1OTU5WjCBizELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4g
+Q2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzANBgNVBAoTBlRoYXd0ZTEdMBsG
+A1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAdBgNVBAMTFlRoYXd0ZSBUaW1l
+c3RhbXBpbmcgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANYrWHhhRYZT
+6jR7UZztsOYuGA7+4F+oJ9O0yeB8WU4WDnNUYMF/9p8u6TqFJBU820cEY8OexJQa
+Wt9MevPZQx08EHp5JduQ/vBR5zDWQQD9nyjfeb6Uu522FOMjhdepQeBMpHmwKxqL
+8vg7ij5FrHGSALSQQZj7X+36ty6K+Ig3AgMBAAGjEzARMA8GA1UdEwEB/wQFMAMB
+Af8wDQYJKoZIhvcNAQEEBQADgYEAZ9viwuaHPUCDhjc1fR/OmsMMZiCouqoEiYbC
+9RAIDb/LogWK0E02PvTX72nGXuSwlG9KuefeW4i2e9vjJ+V2w/A1wcu1J5szedyQ
+pgCed/r8zSeUQhac0xxo7L9c3eWpexAKMnRUEzGLhQOEkbdYATAUOK8oyvyxUBkZ
+CayJSdM=
+-----END CERTIFICATE-----
+
+thawte Primary Root CA
+======================
+
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+VeriSign Class 3 Public Primary Certification Authority - G5
+============================================================
+
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+Entrust.net Secure Server Certification Authority
+=================================================
+
+-----BEGIN CERTIFICATE-----
+MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
+VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
+ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
+KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
+ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
+MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
+ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
+b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
+bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
+U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
+A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
+I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
+wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
+AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
+oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
+BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
+dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
+MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
+b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
+dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
+MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
+E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
+MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
+hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
+95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
+2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
+-----END CERTIFICATE-----
+
+Go Daddy Certification Authority Root Certificate Bundle
+========================================================
+
+-----BEGIN CERTIFICATE-----
+MIIE3jCCA8agAwIBAgICAwEwDQYJKoZIhvcNAQEFBQAwYzELMAkGA1UEBhMCVVMx
+ITAfBgNVBAoTGFRoZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28g
+RGFkZHkgQ2xhc3MgMiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjExMTYw
+MTU0MzdaFw0yNjExMTYwMTU0MzdaMIHKMQswCQYDVQQGEwJVUzEQMA4GA1UECBMH
+QXJpem9uYTETMBEGA1UEBxMKU2NvdHRzZGFsZTEaMBgGA1UEChMRR29EYWRkeS5j
+b20sIEluYy4xMzAxBgNVBAsTKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5j
+b20vcmVwb3NpdG9yeTEwMC4GA1UEAxMnR28gRGFkZHkgU2VjdXJlIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5MREwDwYDVQQFEwgwNzk2OTI4NzCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAMQt1RWMnCZM7DI161+4WQFapmGBWTtwY6vj3D3H
+KrjJM9N55DrtPDAjhI6zMBS2sofDPZVUBJ7fmd0LJR4h3mUpfjWoqVTr9vcyOdQm
+VZWt7/v+WIbXnvQAjYwqDL1CBM6nPwT27oDyqu9SoWlm2r4arV3aLGbqGmu75RpR
+SgAvSMeYddi5Kcju+GZtCpyz8/x4fKL4o/K1w/O5epHBp+YlLpyo7RJlbmr2EkRT
+cDCVw5wrWCs9CHRK8r5RsL+H0EwnWGu1NcWdrxcx+AuP7q2BNgWJCJjPOq8lh8BJ
+6qf9Z/dFjpfMFDniNoW1fho3/Rb2cRGadDAW/hOUoz+EDU8CAwEAAaOCATIwggEu
+MB0GA1UdDgQWBBT9rGEyk2xF1uLuhV+auud2mWjM5zAfBgNVHSMEGDAWgBTSxLDS
+kdRMEXGzYcs9of7dqGrU4zASBgNVHRMBAf8ECDAGAQH/AgEAMDMGCCsGAQUFBwEB
+BCcwJTAjBggrBgEFBQcwAYYXaHR0cDovL29jc3AuZ29kYWRkeS5jb20wRgYDVR0f
+BD8wPTA7oDmgN4Y1aHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNvbS9yZXBv
+c2l0b3J5L2dkcm9vdC5jcmwwSwYDVR0gBEQwQjBABgRVHSAAMDgwNgYIKwYBBQUH
+AgEWKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5jb20vcmVwb3NpdG9yeTAO
+BgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBANKGwOy9+aG2Z+5mC6IG
+OgRQjhVyrEp0lVPLN8tESe8HkGsz2ZbwlFalEzAFPIUyIXvJxwqoJKSQ3kbTJSMU
+A2fCENZvD117esyfxVgqwcSeIaha86ykRvOe5GPLL5CkKSkB2XIsKd83ASe8T+5o
+0yGPwLPk9Qnt0hCqU7S+8MxZC9Y7lhyVJEnfzuz9p0iRFEUOOjZv2kWzRaJBydTX
+RE4+uXR21aITVSzGh6O1mawGhId/dQb8vxRMDsxuxN89txJx9OjxUUAiKEngHUuH
+qDTMBqLdElrRhjZkAzVvb3du6/KFUJheqwNTrZEjYx8WnM25sgVjOuH0aBsXBTWV
+U+4=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIE+zCCBGSgAwIBAgICAQ0wDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1Zh
+bGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIElu
+Yy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24g
+QXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAe
+BgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTA0MDYyOTE3MDYyMFoX
+DTI0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRoZSBHbyBE
+YWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3MgMiBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggENADCCAQgC
+ggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCAPVYYYwhv
+2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6wwdhFJ2+q
+N1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXiEqITLdiO
+r18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMYavx4A6lN
+f4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+YihfukEH
+U1jPEX44dMX4/7VpkI+EdOqXG68CAQOjggHhMIIB3TAdBgNVHQ4EFgQU0sSw0pHU
+TBFxs2HLPaH+3ahq1OMwgdIGA1UdIwSByjCBx6GBwaSBvjCBuzEkMCIGA1UEBxMb
+VmFsaUNlcnQgVmFsaWRhdGlvbiBOZXR3b3JrMRcwFQYDVQQKEw5WYWxpQ2VydCwg
+SW5jLjE1MDMGA1UECxMsVmFsaUNlcnQgQ2xhc3MgMiBQb2xpY3kgVmFsaWRhdGlv
+biBBdXRob3JpdHkxITAfBgNVBAMTGGh0dHA6Ly93d3cudmFsaWNlcnQuY29tLzEg
+MB4GCSqGSIb3DQEJARYRaW5mb0B2YWxpY2VydC5jb22CAQEwDwYDVR0TAQH/BAUw
+AwEB/zAzBggrBgEFBQcBAQQnMCUwIwYIKwYBBQUHMAGGF2h0dHA6Ly9vY3NwLmdv
+ZGFkZHkuY29tMEQGA1UdHwQ9MDswOaA3oDWGM2h0dHA6Ly9jZXJ0aWZpY2F0ZXMu
+Z29kYWRkeS5jb20vcmVwb3NpdG9yeS9yb290LmNybDBLBgNVHSAERDBCMEAGBFUd
+IAAwODA2BggrBgEFBQcCARYqaHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNv
+bS9yZXBvc2l0b3J5MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOBgQC1
+QPmnHfbq/qQaQlpE9xXUhUaJwL6e4+PrxeNYiY+Sn1eocSxI0YGyeR+sBjUZsE4O
+WBsUs5iB0QQeyAfJg594RAoYC5jcdnplDQ1tgMQLARzLrUc+cb53S8wGd9D0Vmsf
+SxOaFIqII6hR8INMqzW/Rn453HWkrugp++85j09VZw==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
+NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
+dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
+WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
+v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
+UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
+IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
+W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
+-----END CERTIFICATE-----
+
+GeoTrust Global CA
+==================
+
+-----BEGIN CERTIFICATE-----
+MIIDfTCCAuagAwIBAgIDErvmMA0GCSqGSIb3DQEBBQUAME4xCzAJBgNVBAYTAlVT
+MRAwDgYDVQQKEwdFcXVpZmF4MS0wKwYDVQQLEyRFcXVpZmF4IFNlY3VyZSBDZXJ0
+aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDIwNTIxMDQwMDAwWhcNMTgwODIxMDQwMDAw
+WjBCMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UE
+AxMSR2VvVHJ1c3QgR2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEA2swYYzD99BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9m
+OSm9BXiLnTjoBbdqfnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIu
+T8rxh0PBFpVXLVDviS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6c
+JmTM386DGXHKTubU1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmR
+Cw7+OC7RHQWa9k0+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5asz
+PeE4uwc2hGKceeoWMPRfwCvocWvk+QIDAQABo4HwMIHtMB8GA1UdIwQYMBaAFEjm
+aPkr0rKV10fYIyAQTzOYkJ/UMB0GA1UdDgQWBBTAephojYn7qwVkDBF9qn1luMrM
+TjAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjA6BgNVHR8EMzAxMC+g
+LaArhilodHRwOi8vY3JsLmdlb3RydXN0LmNvbS9jcmxzL3NlY3VyZWNhLmNybDBO
+BgNVHSAERzBFMEMGBFUdIAAwOzA5BggrBgEFBQcCARYtaHR0cHM6Ly93d3cuZ2Vv
+dHJ1c3QuY29tL3Jlc291cmNlcy9yZXBvc2l0b3J5MA0GCSqGSIb3DQEBBQUAA4GB
+AHbhEm5OSxYShjAGsoEIz/AIx8dxfmbuwu3UOx//8PDITtZDOLC5MH0Y0FWDomrL
+NhGc6Ehmo21/uBPUR/6LWlxz/K7ZGzIZOKuXNBSqltLroxwUCEm2u+WR74M26x1W
+b8ravHNjkOR/ez4iyz0H7V84dJzjA1BOoa+Y7mHyhD8S
+-----END CERTIFICATE-----
+
diff --git a/httplib2/iri2uri.py b/httplib2/iri2uri.py
new file mode 100644
index 0000000..d88c91f
--- /dev/null
+++ b/httplib2/iri2uri.py
@@ -0,0 +1,110 @@
+"""
+iri2uri
+
+Converts an IRI to a URI.
+
+"""
+__author__ = "Joe Gregorio (joe@bitworking.org)"
+__copyright__ = "Copyright 2006, Joe Gregorio"
+__contributors__ = []
+__version__ = "1.0.0"
+__license__ = "MIT"
+__history__ = """
+"""
+
+import urlparse
+
+
+# Convert an IRI to a URI following the rules in RFC 3987
+#
+# The characters we need to encode and escape are defined in the spec:
+#
+# iprivate =  %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
+# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
+#         / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
+#         / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
+#         / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
+#         / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
+#         / %xD0000-DFFFD / %xE1000-EFFFD
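+#
+# A hand-worked example (not from the spec text): U+2604 (COMET) falls in
+# the ucschar range %xA0-D7FF, UTF-8 encodes to the octets E2 98 84, and
+# is therefore emitted as "%E2%98%84".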
+
+escape_range = [
+    (0xA0, 0xD7FF),
+    (0xE000, 0xF8FF),
+    (0xF900, 0xFDCF),
+    (0xFDF0, 0xFFEF),
+    (0x10000, 0x1FFFD),
+    (0x20000, 0x2FFFD),
+    (0x30000, 0x3FFFD),
+    (0x40000, 0x4FFFD),
+    (0x50000, 0x5FFFD),
+    (0x60000, 0x6FFFD),
+    (0x70000, 0x7FFFD),
+    (0x80000, 0x8FFFD),
+    (0x90000, 0x9FFFD),
+    (0xA0000, 0xAFFFD),
+    (0xB0000, 0xBFFFD),
+    (0xC0000, 0xCFFFD),
+    (0xD0000, 0xDFFFD),
+    (0xE1000, 0xEFFFD),
+    (0xF0000, 0xFFFFD),
+    (0x100000, 0x10FFFD),
+]
+
+def encode(c):
+    # Leave the character untouched unless its code point falls inside one
+    # of the escape_range intervals above; in that case, UTF-8 encode it
+    # and %-encode each resulting octet.
+    retval = c
+    i = ord(c)
+    for low, high in escape_range:
+        if i < low:
+            break
+        if low <= i <= high:
+            retval = "".join(["%%%02X" % ord(o) for o in c.encode('utf-8')])
+            break
+    return retval
+
+
+def iri2uri(uri):
+    """Convert an IRI to a URI. Note that IRIs must be
+    passed as unicode strings; that is, do not utf-8 encode
+    the IRI before passing it to this function."""
+    if isinstance(uri, unicode):
+        (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri)
+        authority = authority.encode('idna')
+        # For each character in 'ucschar' or 'iprivate'
+        #  1. encode as utf-8
+        #  2. then %-encode each octet of that utf-8
+        uri = urlparse.urlunsplit((scheme, authority, path, query, fragment))
+        uri = "".join([encode(c) for c in uri])
+    return uri
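+
+# A minimal usage sketch (mirroring the unit tests below): the authority
+# component is IDNA-encoded while the remaining components are %-encoded,
+# e.g.
+#
+#     iri2uri(u"http://\N{COMET}.com/\N{COMET}")
+#     => "http://xn--o3h.com/%E2%98%84"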
+
+if __name__ == "__main__":
+    import unittest
+
+    class Test(unittest.TestCase):
+
+        def test_uris(self):
+            """Test that URIs are invariant under the transformation."""
+            invariant = [
+                u"ftp://ftp.is.co.za/rfc/rfc1808.txt",
+                u"http://www.ietf.org/rfc/rfc2396.txt",
+                u"ldap://[2001:db8::7]/c=GB?objectClass?one",
+                u"mailto:John.Doe@example.com",
+                u"news:comp.infosystems.www.servers.unix",
+                u"tel:+1-816-555-1212",
+                u"telnet://192.0.2.16:80/",
+                u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
+            for uri in invariant:
+                self.assertEqual(uri, iri2uri(uri))
+
+        def test_iri(self):
+            """ Test that the right type of escaping is done for each part of the URI."""
+            self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}"))
+            self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}"))
+            self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}"))
+            self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}"))
+            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
+            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
+            self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))
+
+    unittest.main()
+
+
diff --git a/lib/cacerts/urlfetch_cacerts.txt b/lib/cacerts/urlfetch_cacerts.txt
index 3e37045..75f3990 100644
--- a/lib/cacerts/urlfetch_cacerts.txt
+++ b/lib/cacerts/urlfetch_cacerts.txt
@@ -4562,32 +4562,6 @@
 R1uUq27UlTMdphVx8fiUylQ5PsE=
 -----END CERTIFICATE-----
 
-subject= /C=US/OU=American Express Technologies/ST=NY/CN=American Express Channel Server CA 3
-serial=0400040B
------BEGIN CERTIFICATE-----
-MIID0DCCAzmgAwIBAgIEBAAECzANBgkqhkiG9w0BAQUFADB1MQswCQYDVQQGEwJV
-UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
-cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
-b2JhbCBSb290MB4XDTA2MDcyMDEyMjAwMFoXDTEzMDcyMDIzNTkwMFowcTELMAkG
-A1UEBhMCVVMxJjAkBgNVBAsTHUFtZXJpY2FuIEV4cHJlc3MgVGVjaG5vbG9naWVz
-MQswCQYDVQQIEwJOWTEtMCsGA1UEAxMkQW1lcmljYW4gRXhwcmVzcyBDaGFubmVs
-IFNlcnZlciBDQSAzMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDTeIiQKppj
-lmOpeWC8aTY2CmcLvs0kIUXjQ19Mr80Je/a7Z3LS9S4tphLv0RNdvq4ZtaxarfUZ
-NZpr+USa6wzol46A5dU8YFt/yVN9AmGLQOmHu9/1B1s0l7o9yI8TrTDY/aJBqq/g
-tgdaNCVbvHdd2zrV3DEQMb2ahZFGXcz1tQIDAQABo4IBbzCCAWswRQYDVR0fBD4w
-PDA6oDigNoY0aHR0cDovL3d3dy5wdWJsaWMtdHJ1c3QuY29tL2NnaS1iaW4vQ1JM
-LzIwMTgvY2RwLmNybDAdBgNVHQ4EFgQUJXSrcybYiFGQWJq2AYSqhcpfo9AwUwYD
-VR0gBEwwSjBIBgkrBgEEAbE+AQAwOzA5BggrBgEFBQcCARYtaHR0cDovL3d3dy5w
-dWJsaWMtdHJ1c3QuY29tL0NQUy9PbW5pUm9vdC5odG1sMIGJBgNVHSMEgYEwf6F5
-pHcwdTELMAkGA1UEBhMCVVMxGDAWBgNVBAoTD0dURSBDb3Jwb3JhdGlvbjEnMCUG
-A1UECxMeR1RFIEN5YmVyVHJ1c3QgU29sdXRpb25zLCBJbmMuMSMwIQYDVQQDExpH
-VEUgQ3liZXJUcnVzdCBHbG9iYWwgUm9vdIICAaUwDgYDVR0PAQH/BAQDAgHGMBIG
-A1UdEwEB/wQIMAYBAf8CAQAwDQYJKoZIhvcNAQEFBQADgYEAgJdgB6j0+q1tvQ0j
-Z6PkgUmpCBc0glpm8g60cYGDPFCnTSjx0xZewi7tBstql1TSAPY+zz/MJscAy1ST
-HHXE+iYdS/RuVIcIxY/nFhG3/2yo2QEn6H43CPdol8z45A9g1uepOu4Y44G+ACpM
-jqIsyd1WMO5PmwlZ+/tLi50XRLc=
------END CERTIFICATE-----
-
 subject= /C=TW/O=TAIWAN-CA.COM Inc./OU=SSL Certification Service Provider/CN=TaiCA Secure CA
 serial=07274E79
 -----BEGIN CERTIFICATE-----
@@ -6932,34 +6906,6 @@
 4mHgmbanHZdGU8fLQD7DfPIR7rTlOBQG+qgkRT3TYA==
 -----END CERTIFICATE-----
 
-subject= /O=TRUSTSIGN IN CERTIFICADORA LTDA/CN=CA ROOT TRUSTSIGN
-serial=24EC50F519E036B9C0AC0E27249167AB
------BEGIN CERTIFICATE-----
-MIIEOzCCAyOgAwIBAgIQJOxQ9RngNrnArA4nJJFnqzANBgkqhkiG9w0BAQUFADA6
-MRkwFwYDVQQKExBSU0EgU2VjdXJpdHkgSW5jMR0wGwYDVQQLExRSU0EgU2VjdXJp
-dHkgMjA0OCBWMzAeFw0xMTAxMjUxNTU1MzBaFw0xNDAyMDExNTQ3NDJaMEYxKDAm
-BgNVBAoTH1RSVVNUU0lHTiBJTiBDRVJUSUZJQ0FET1JBIExUREExGjAYBgNVBAMT
-EUNBIFJPT1QgVFJVU1RTSUdOMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAvm7vV59MIOhu/ClT/x9ZoSK82J3XhHOOV2Kc/5KUMhCzWeEJu07FruM15SiS
-FTVUWEbe/l28MVLJ7P12tlkRtM9e+ATjEMQiTFYg/6MQjxONPtWVIL4pIwhJ7ou1
-B/2kTxM66Ck7LumOOscrzd48or34I4fK+E/BjJFD0MoW++LKwC1Qeabd2CxuBheV
-ZO8RjrHt7/m1Idk2fy+pG/wYBNzvNhz2pkXhXEpjyu1LdkHTYDRbmWvd19bYMYvm
-NIiInBSn5gD0DXPL4Z7q9Ws0VfbaUpwoi8wPanvWEVsQ+9ljuL2hd5eaTCw12Wel
-83qGTNn7LI/DVpPy+Br5hdyevQIDAQABo4IBLzCCASswHwYDVR0jBBgwFoAUB8NR
-MKSq6UWuNST6/yQsM9CxnYwwUwYDVR0gBEwwSjBIBgkqhkiG9w0FBgEwOzA5Bggr
-BgEFBQcCARYtaHR0cDovL3d3dy50cnVzdHNpZ24uY29tLmJyL2ZpbGVzL3JzYS9k
-cGMucGRmMHAGA1UdHwRpMGcwZaBjoGGGX2h0dHA6Ly93d3cucnNhc2VjdXJpdHku
-Y29tL3Byb2R1Y3RzL2tlb24vcmVwb3NpdG9yeS9jZXJ0aWZpY2F0ZV9zdGF0dXMv
-UlNBX1NlY3VyaXR5XzIwNDhfdjMuQ1JMMB0GA1UdDgQWBBQQUBskQ8KXQDYyz5nT
-mO0ybgx/UDAOBgNVHQ8BAf8EBAMCAYYwEgYDVR0TAQH/BAgwBgEB/wIBADANBgkq
-hkiG9w0BAQUFAAOCAQEAaML9Lj5kPtUpc2+yOI7Bqh8yTwUYjpvVFpLDsVTpO+Kz
-XkaCShwmvP7V2LPdPZyUx8bDcqO3ow3WLruf7PHi2tOfb2ab0r8zV8OjJU5lPy3X
-xxQRC+WxRzvfpAZKuLp6FJZ5zibsJxV9PXYgaB2kzoOc224TJUsHIUN2vSMyS0Pm
-+Jo69YePdSNSt9dYbP3rrWxyNxb740bZ0PFvU2cgc/QPir5iKDth2RhJ1BBKIyIz
-E4gdIJIKPdhkIoZGbTDD+BbpuSE4PkEj7zAb00Ipbs6O59gl+pQ3coxJu6MYQtnV
-swE5CQJdGpQfHM0etEoIF/q2qqfPrwB4FypXn0zVdg==
------END CERTIFICATE-----
-
 subject= /C=UK/O=Vodafone Group/CN=Vodafone (Secure Sites)
 serial=07272577
 -----BEGIN CERTIFICATE-----
@@ -7184,34 +7130,6 @@
 jDicff6h0Y4YZi4IvQZheYVU+9wLU0cxkAqap4ebEOB620M=
 -----END CERTIFICATE-----
 
-subject= /C=CA/O=Syncrude Canada Ltd/CN=Syncrude Corporate Policy CA 2
-serial=040003A6
------BEGIN CERTIFICATE-----
-MIIEPzCCA6igAwIBAgIEBAADpjANBgkqhkiG9w0BAQUFADB1MQswCQYDVQQGEwJV
-UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
-cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
-b2JhbCBSb290MB4XDTA0MTAyNjE4MjcwMFoXDTE0MTAyNjIzNTkwMFowVDELMAkG
-A1UEBhMCQ0ExHDAaBgNVBAoTE1N5bmNydWRlIENhbmFkYSBMdGQxJzAlBgNVBAMT
-HlN5bmNydWRlIENvcnBvcmF0ZSBQb2xpY3kgQ0EgMjCCASIwDQYJKoZIhvcNAQEB
-BQADggEPADCCAQoCggEBAPaSbcz39pNz42KKHzNynvaEbw1VJd+yCgXCaTQIx8zx
-tKoSyw9dq1ePj2ddLNMGnW7R60kvRDo9bps0jSoRT/QxX8O8aU8zvpF+ZAqTNgY2
-RGZzlylSwH9UBC/ZPiFftOOSwAII6uI+xdxXZ4BEAK5DHw6VdZJchuhch5edu8TD
-pWX+U+8xXJQ8M88i773+GNurhsgVILEdOl0KqKafsfLa9UJcSF9f+JwlQuiMRjqa
-HvycheRh8xfgeA4LVitCYJXa/UVT7JBl/zSK05I9V1H+YkT0klGnUHnPgbdvoSz6
-oGCHlgvwScOTIGLAhPSJ5aiMCAY5ZKd013NKdje2GvUCAwEAAaOCAXcwggFzMEUG
-A1UdHwQ+MDwwOqA4oDaGNGh0dHA6Ly93d3cucHVibGljLXRydXN0LmNvbS9jZ2kt
-YmluL0NSTC8yMDE4L2NkcC5jcmwwHQYDVR0OBBYEFFZKlN8wEBlZD8TDSYWE9UGA
-I5LFMFsGA1UdIARUMFIwSAYJKwYBBAGxPgEAMDswOQYIKwYBBQUHAgEWLWh0dHA6
-Ly93d3cucHVibGljLXRydXN0LmNvbS9DUFMvT21uaVJvb3QuaHRtbDAGBgRVHSAA
-MIGJBgNVHSMEgYEwf6F5pHcwdTELMAkGA1UEBhMCVVMxGDAWBgNVBAoTD0dURSBD
-b3Jwb3JhdGlvbjEnMCUGA1UECxMeR1RFIEN5YmVyVHJ1c3QgU29sdXRpb25zLCBJ
-bmMuMSMwIQYDVQQDExpHVEUgQ3liZXJUcnVzdCBHbG9iYWwgUm9vdIICAaUwDgYD
-VR0PAQH/BAQDAgGGMBIGA1UdEwEB/wQIMAYBAf8CAQEwDQYJKoZIhvcNAQEFBQAD
-gYEANp/gkdGOXtNZxoXiSp0onq6Y2qPDAqnZVxAbkuWBhyx3IeJH6gkn0psSqFyt
-eM8Do31U/16bCMvOh/KYjwdbQExcYdyxWJQyUtMn9pdw65HJ2a0jHFFj+iUvLg5W
-b06l9DDUIx9ULxbXViVcKAWx20e0OaIvqE44GRi2t122a88=
------END CERTIFICATE-----
-
 subject= /C=BE/O=Certipost s.a./n.v./CN=Certipost E-Trust Primary Normalised CA
 serial=07271435
 -----BEGIN CERTIFICATE-----
@@ -7324,34 +7242,6 @@
 mIzroG3RNlOQoI8WMB7ew79/RqWVKvnI3jvbd/TyMrEzYaIwNQ==
 -----END CERTIFICATE-----
 
-subject= /C=FR/O=Minist\xC3\xA8re \xC3\x89cologie, D\xC3\xA9veloppement et Am\xC3\xA9nagement durables/OU=0002 120066022/CN=AC Serveurs
-serial=12
------BEGIN CERTIFICATE-----
-MIIEQjCCAyqgAwIBAgIBEjANBgkqhkiG9w0BAQUFADCBhjELMAkGA1UEBhMCRlIx
-RjBEBgNVBAoMPU1pbmlzdMOocmUgw4ljb2xvZ2llLCBEw6l2ZWxvcHBlbWVudCBl
-dCBBbcOpbmFnZW1lbnQgZHVyYWJsZXMxFzAVBgNVBAsMDjAwMDIgMTIwMDY2MDIy
-MRYwFAYDVQQDDA1BQyBNaW5pc3TDqHJlMB4XDTA3MTAwNTEwMjkyNFoXDTEzMTAw
-NTEwMjkyNFowgYQxCzAJBgNVBAYTAkZSMUYwRAYDVQQKDD1NaW5pc3TDqHJlIMOJ
-Y29sb2dpZSwgRMOpdmVsb3BwZW1lbnQgZXQgQW3DqW5hZ2VtZW50IGR1cmFibGVz
-MRcwFQYDVQQLDA4wMDAyIDEyMDA2NjAyMjEUMBIGA1UEAwwLQUMgU2VydmV1cnMw
-ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCiTWRq9nCTaXnLRZ4C8/Dc
-S03SPcVM6Ks/2UISD9+u0CoHyb6cMBtLM9kByxjkBBaqrCuTssybHpO2xtAljRMa
-/DKyYaqwmsCF7Xhsrdaigysvue4UDsarqvJQRK5bafRRfM+2Adl2OjPV05sQBUzt
-lI9X7N3dGnk571eFCjLiWCaloTmq0bgPvw6u6oV5hzz3Xg/gjjgm9rxmIZN2OdUh
-lA9kLrJycFFaYkMsflWTyOV8caaho3npMp+AK7q/yI6/UzH5L2R/awEz5asOIv+A
-lUr/n9iQJ2qsKi8tan7p6V07kSgfxqOy4Alf/G2FzQfaHardLbQLcJqjbSft0ta5
-AgMBAAGjgbowgbcwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwHQYD
-VR0OBBYEFE2hUbCf/teHqW3EMY/94GUpALwFMB8GA1UdIwQYMBaAFAU3s/4441Se
-oBCK2kORYyNT1N0LMBgGA1UdIAQRMA8wDQYLKwYBBAH5EAMBAgEwOgYDVR0fBDMw
-MTAvoC2gK4YpaHR0cDovL2lnYy5tZWRhZC5nb3V2LmZyL0FDLU1pbmlzdGVyZS5j
-cmwwDQYJKoZIhvcNAQEFBQADggEBACiqAe3xggIGzZ9svC4w4Q40YKOjz/1JrOVo
-EN/n4q+LWUcZAVrGObJ+DA08I68jJT+gJt9CxjJRpGdZb2T5sDG1vJeTJTek5+qv
-KVXBas5Y0D2uHJcTyDIJXzK36lmkLUdU/Dh/huK15lZPRmpd5zjjiP+Fm2miyyZa
-YZeEr7jU/hHgnyeus2Zgkd7/jPunM7j4O326tH/6J81wuS+rcHvwL0Lrh1Fkazrf
-TN+4Ebb+ZnDcoPX+d80kfOP2SALQ/8sk4QuPuPWsKXFdd3B61fEQ8wF092/B4TEw
-FW8j62gjYNih7W7hdPGt6X5n+lIr7iBn55+KpFHS4eNl6iGsqhg=
------END CERTIFICATE-----
-
 subject= /C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert High Assurance EV Root CA
 serial=4287250E
 -----BEGIN CERTIFICATE-----
@@ -8117,35 +8007,6 @@
 Oq0l87Dq
 -----END CERTIFICATE-----
 
-subject= /OU=Extended Validation CA/O=GlobalSign/CN=GlobalSign Extended Validation CA
-serial=0400000000011256AD5FB2
------BEGIN CERTIFICATE-----
-MIIEUjCCAzqgAwIBAgILBAAAAAABElatX7IwDQYJKoZIhvcNAQEFBQAwTDEgMB4G
-A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
-Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDcwNTA0MTAwMDAwWhcNMjExMjE1
-MDcwMDAwWjBiMR8wHQYDVQQLExZFeHRlbmRlZCBWYWxpZGF0aW9uIENBMRMwEQYD
-VQQKEwpHbG9iYWxTaWduMSowKAYDVQQDEyFHbG9iYWxTaWduIEV4dGVuZGVkIFZh
-bGlkYXRpb24gQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC1Emt6
-QwnnRLFD0fgyHn2XJOtkPy/ieGsyjWyyRakXgmd/EWGtkLNGhEAjYVDxE70u/758
-XoXH2Q0c9e5Ecde4j4W2DkbMSSM1ITsl+lAtS33NK1Q/j+opI+qTDvyqhzbEZtcO
-c/Xc8P0H+cGPeqBI397N2xkYMMrYg/F5rDaKn6jA0onrYKTtKlXFFQxjK8cLfjHO
-wZsr+oalY032uubczjUhZs9YHMjzqi+My80kugG9J73V72nVdaHiorih7oivdX9f
-8tIyEHc1H2n5AmKEf4onD3/6tr6Zcq59rJtyD1jDy8gStbiA4cCxwNHRhjX8cXwg
-Lva4p7D8hCA2NoaZAgMBAAGjggEdMIIBGTAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0T
-AQH/BAgwBgEB/wIBADBGBgNVHSAEPzA9MDsGBFUdIAAwMzAxBggrBgEFBQcCARYl
-aHR0cDovL3d3dy5nbG9iYWxzaWduLm5ldC9yZXBvc2l0b3J5LzAdBgNVHQ4EFgQU
-NLH5yYxrNUTMCGkK7uOjuVy/FuAwNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2Ny
-bC5nbG9iYWxzaWduLm5ldC9yb290LXIyLmNybDARBglghkgBhvhCAQEEBAMCAgQw
-IAYDVR0lBBkwFwYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMB8GA1UdIwQYMBaAFJvi
-B1dnHB7AagbeWbSaLd/cGYYuMA0GCSqGSIb3DQEBBQUAA4IBAQBCEHNxOCzkb5kQ
-Gi6v/bAe/0Q0bbPE0KgFNhoMaU6CiZhv8vgdqUYm47+5hRqjB54Fpay7nXm9helG
-91WStyacHjZ8hAhGyoPqqYJ3xfbvhzvY3wmt4GyQZmVj7q+pL/OPEQ5XBEv7tBG4
-mgEKZ6kJwQfY3xt1hhToEPmLBXQmtaWo8uFWX/waPh2jhys9VijcWnzrisCmv5QB
-SaO7lGsEOH3/tOoyxUwhue4D79uPT9GPpUU/UFpaWaon10OO4a0arn5XOKCvlgJv
-cIFElwuJ0KJZPv78kWpGwjqFnVU590H5pFz6GpcgrJyx+IeBQW2+iPfx04FPKYyx
-oVV8eLqw
------END CERTIFICATE-----
-
 subject= /C=AU/O=Department of Education and Training/OU=ITD/CN=NSW-DET-INT-CA1
 serial=0400000000012507408E29
 -----BEGIN CERTIFICATE-----
@@ -9016,35 +8877,6 @@
 jqgeD/fMGD1Vg9aKFzIGP/rJbC4pzhtW
 -----END CERTIFICATE-----
 
-subject= /C=FR/O=Secteur public \xC3\x89cologie D\xC3\xA9veloppement et Am\xC3\xA9nagement durables/OU=0002 120066022/CN=AC Racine
-serial=1121AE1D2355B5369F0A99AFEAB69227C8BD
------BEGIN CERTIFICATE-----
-MIIEZDCCA0ygAwIBAgISESGuHSNVtTafCpmv6raSJ8i9MA0GCSqGSIb3DQEBBQUA
-MIGFMQswCQYDVQQGEwJGUjEPMA0GA1UECBMGRnJhbmNlMQ4wDAYDVQQHEwVQYXJp
-czEQMA4GA1UEChMHUE0vU0dETjEOMAwGA1UECxMFRENTU0kxDjAMBgNVBAMTBUlH
-Qy9BMSMwIQYJKoZIhvcNAQkBFhRpZ2NhQHNnZG4ucG0uZ291di5mcjAeFw0wOTEy
-MTQxNjAwMDBaFw0xOTEwMDUxNTAwMDBaMIGFMQswCQYDVQQGEwJGUjFJMEcGA1UE
-CgxAU2VjdGV1ciBwdWJsaWMgw4ljb2xvZ2llIETDqXZlbG9wcGVtZW50IGV0IEFt
-w6luYWdlbWVudCBkdXJhYmxlczEXMBUGA1UECwwOMDAwMiAxMjAwNjYwMjIxEjAQ
-BgNVBAMMCUFDIFJhY2luZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
-AOzWUdLvrBQ1zxlOXIe047jFhJRggi3IL5lmG3IVI0ijtofnZ+aoz7M7Pc0vlekd
-uUUkyoXjPOmk4+Sa7BEv9upmFDVthscKKGw7QAalTLgNvnvB7hX1I8B4HNXRMBK6
-0qfyFSjbyBVzOMzQ1e+49T4QGhhKlpFkEbkowbCVJGYYtIOhvxEoeqs84MmYy9LY
-ZHnzlUabYJ3NjkmaELEWu3SXj4M8Hsiin06ZXX/xsV9eXLDoKocyubrMGw+M57MA
-O7xKRIJlzysursMvu5EWBeMVthwRQNK4T98Sa5d5OjGOqtRBGX4rhd8fHk4A2v2c
-yyf6jDZryiBMsCSmTALgdMcCAwEAAaOByzCByDAOBgNVHQ8BAf8EBAMCAQYwFQYD
-VR0gBA4wDDAKBggqgXoBeQEBAjAPBgNVHRMBAf8EBTADAQH/ME4GA1UdHwRHMEUw
-Q6BBoD+GPWh0dHA6Ly9pZ2MuYXBwbGljYXRpb24uZGV2ZWxvcHBlbWVudC1kdXJh
-YmxlLmdvdXYuZnIvaWdjYS5jcmwwHQYDVR0OBBYEFBWSEu7OCJ7zxGdPHwM+5JAh
-oWv9MB8GA1UdIwQYMBaAFKMFLxhgUMKJCt0rIU//jk6oMDE2MA0GCSqGSIb3DQEB
-BQUAA4IBAQAUQxzXXCvVdlm9cW0nmujCAwt5C/U0AUDAykwys39lNnY6xOcmd6cA
-sU7zTHMyqpvq82N2PM56BYnBLQF4ZmvDttzr10EMC/McMfCqtNEoNlTKYrIgREBp
-VINCC7nxrCNFyff7U460t0Mn5vrr/Ide6vPVpcHxMqwAEAKm3g2IZLXpS2IUp76B
-CXftabG/Bhu6dYset5BM/Lifn9de5orUQSclQYJRGxfrK2D5CJoAHOkOCXZ61X38
-/PLyDc4J+PeMLaWcbtWxQi3QASzJICzdaAvwV9AaYTYqHOkvMtdgp6PQemzZPFgn
-VvJUmp/nDQSp/FGVflsM4KCPUVRXV39t
------END CERTIFICATE-----
-
 subject= /C=NL/O=Intermediate Certificate/OU=Domain Validated SSL/CN=Intermediate Certificate DV SSL CA
 serial=023A60
 -----BEGIN CERTIFICATE-----
@@ -9132,35 +8964,6 @@
 uoyLIp1kXkLCsr86NhLteDbogeVqYXO1Tg==
 -----END CERTIFICATE-----
 
-subject= /C=FR/O=Minist\xC3\xA8re \xC3\x89cologie, D\xC3\xA9veloppement et Am\xC3\xA9nagement durables/OU=0002 120066022/CN=AC Minist\xC3\xA8re
-serial=42
------BEGIN CERTIFICATE-----
-MIIEZjCCA06gAwIBAgIBQjANBgkqhkiG9w0BAQUFADCBhTELMAkGA1UEBhMCRlIx
-STBHBgNVBAoMQFNlY3RldXIgcHVibGljIMOJY29sb2dpZSBEw6l2ZWxvcHBlbWVu
-dCBldCBBbcOpbmFnZW1lbnQgZHVyYWJsZXMxFzAVBgNVBAsMDjAwMDIgMTIwMDY2
-MDIyMRIwEAYDVQQDDAlBQyBSYWNpbmUwHhcNMDkxMjAyMTAwNDMxWhcNMTYxMDA1
-MDkwNDMxWjCBhjELMAkGA1UEBhMCRlIxRjBEBgNVBAoMPU1pbmlzdMOocmUgw4lj
-b2xvZ2llLCBEw6l2ZWxvcHBlbWVudCBldCBBbcOpbmFnZW1lbnQgZHVyYWJsZXMx
-FzAVBgNVBAsMDjAwMDIgMTIwMDY2MDIyMRYwFAYDVQQDDA1BQyBNaW5pc3TDqHJl
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0sqb5EhMe2es6whM/YGj
-jpJTwQtUbYgr/Mz0BQJooF6HAQ8gpVsoLY08pqHYnaUqefGirGJCPufAfX2BOfCV
-93UeNQS52VVt1Wwl/wW+yjmy5RdJ9iEqCqli9mEGY+pL8DldGvSNIYSmtwZfvyJR
-sp2YiYvuM/q03cyknKltlZlPEUYherlMc3j/M3RhRGQWeh8ruGy/+rwMfpH/z0jz
-3CQlwsThBPbStpnFXjp3ZEgZSskBK1jt7FBlS0GVlp0FlCxZMzqToC3HMwsw/Zvm
-QU/G+EEiRbR5e5Ovqy0ftSAp1iVLiNWjtxRiSFQJt9oHMZVHxp+f3FbqVMqt3F+v
-ewIDAQABo4HdMIHaMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB8G
-A1UdIwQYMBaAFBWSEu7OCJ7zxGdPHwM+5JAhoWv9MB0GA1UdDgQWBBQFN7P+OONU
-nqAQitpDkWMjU9TdCzBdBgNVHR8EVjBUMFKgUKBOhkxodHRwOi8vaWdjLmFwcGxp
-Y2F0aW9uLmRldmVsb3BwZW1lbnQtZHVyYWJsZS5nb3V2LmZyL0FDX1JhY2luZS9j
-cmwvY3JsLTEuY3JsMBgGA1UdIAQRMA8wDQYLKwYBBAH5EAMBAQEwDQYJKoZIhvcN
-AQEFBQADggEBAK7gBVfHvekNdTs5RInz2CTEOqI6hSPAgQOVYHOI0+gzbcIb/xTs
-bs2ySI9dEybRNQU9FUFB8NYkO+KYSz9uTvI0N0FBbhjGtC4xRwsuSrkNItPMaatC
-Rt8DW/NOu0osa28CN8mSP7LMs/Ue6GJgyi3BUaeVkNH0IuUxnfZ/qgBMLhy7CfWL
-pVwcvlZoAENpmAEpkGOwzPonY66CcY2UyxdDBFPgfsHcsdDq9oRrQIk4okeKc6vt
-mE5LORQqEKCX21KhSDHP9Uir3fF7ZL2lwdzdGuvPYctciyDVupS2BwbZVcd/1zco
-LA6r/3tnveM7CW15sBz8ijJA3NquOg9zrc0=
------END CERTIFICATE-----
-
 subject= /C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root
 serial=51260A931CE27F9CC3A55F79E072AE82
 -----BEGIN CERTIFICATE-----
@@ -10033,6 +9836,36 @@
 EzeJiQ==
 -----END CERTIFICATE-----
 
+subject= /O=ZF/OU=ZF IT/CN=ZF Root CA
+serial=0727312D
+-----BEGIN CERTIFICATE-----
+MIIEgDCCA+mgAwIBAgIEBycxLTANBgkqhkiG9w0BAQUFADB1MQswCQYDVQQGEwJV
+UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
+cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
+b2JhbCBSb290MB4XDTA5MDMxODEzMDg0N1oXDTE3MDMxODEzMDgyMFowMjELMAkG
+A1UECgwCWkYxDjAMBgNVBAsMBVpGIElUMRMwEQYDVQQDDApaRiBSb290IENBMIIB
+IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAnE8mMGUF0vvY1G0QzNkbM7+y
+m9q/pb2cc/tNtQipF6Vv6uwSI8nIUOr8xjLbmYjezqkaDtfXrsEoaILRou2wzHDf
+2x4wD7rTQ4L/j152QRhiwLnj3ipmGN4cS9jpwtqQzOVWVZh1qmYR7iqeK6++WNRO
+mzuJZR++1Fxvm0wrKdpr0AZHuzFusStS9TBNQCnRzb2Xlq4BX8Gk9IisSoh4k6zA
+xMd0VtR1Zesd1QF885E24Vx2Bb/nxafcaIBE6eu+yfyOUIfTo6u72+xkZwc4bCZl
+9eQmo7KecRR2cTgexY6h35PgOFZaG5P4WRcLjMnQoFuw9QHxbN7POw+i3C25xwID
+AQABo4IB2jCCAdYwEgYDVR0TAQH/BAgwBgEB/wIBAjCBkQYDVR0gBIGJMIGGMDoG
+DCsGAQQB5mcBAQECATAqMCgGCCsGAQUFBwIBFhxodHRwOi8vcGtpLnpmLXdvcmxk
+LmNvbS9jcHMvMEgGCSsGAQQBsT4BADA7MDkGCCsGAQUFBwIBFi1odHRwOi8vY3li
+ZXJ0cnVzdC5vbW5pcm9vdC5jb20vcmVwb3NpdG9yeS5jZm0wDgYDVR0PAQH/BAQD
+AgEGMIGJBgNVHSMEgYEwf6F5pHcwdTELMAkGA1UEBhMCVVMxGDAWBgNVBAoTD0dU
+RSBDb3Jwb3JhdGlvbjEnMCUGA1UECxMeR1RFIEN5YmVyVHJ1c3QgU29sdXRpb25z
+LCBJbmMuMSMwIQYDVQQDExpHVEUgQ3liZXJUcnVzdCBHbG9iYWwgUm9vdIICAaUw
+cQYDVR0fBGowaDA6oDigNoY0aHR0cDovL3d3dy5wdWJsaWMtdHJ1c3QuY29tL2Nn
+aS1iaW4vQ1JMLzIwMTgvY2RwLmNybDAqoCigJoYkaHR0cDovL3BraS56Zi13b3Js
+ZC5jb20vT21uaVJvb3QuY3JsMB0GA1UdDgQWBBT3fw/ub5yaXhMr+9znyh9d52sj
+xDANBgkqhkiG9w0BAQUFAAOBgQBzFkGoGigKiq3GYTlmGknlz6OWMc87W4GUQbyj
+0YW1txojr6+2nNn/h4hUDOfWbZODoFGmJ+PWYOmtM2B7ltGHa9XjO2tzhgph3z10
+3qG0sSgC4Le+pvg0im50KJW6qLNovPhKh61dia1MH0CZAPYbhVm9WPwi+gwZMfL6
+HcEM1A==
+-----END CERTIFICATE-----
+
 subject= /C=FR/O=Dhimyotis/OU=0002 481463081/CN=Certigna SSL PRIS/serialNumber=25
 serial=19
 -----BEGIN CERTIFICATE-----
@@ -10333,36 +10166,6 @@
 j2n/cZIn9WBGM1Q=
 -----END CERTIFICATE-----
 
-subject= /C=NL/O=DigiNotar B.V./CN=DigiNotar PKIoverheid CA Overheid en Bedrijven
-serial=013169B0
------BEGIN CERTIFICATE-----
-MIIEiDCCA3CgAwIBAgIEATFpsDANBgkqhkiG9w0BAQUFADBZMQswCQYDVQQGEwJO
-TDEeMBwGA1UEChMVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSowKAYDVQQDEyFTdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gT3ZlcmhlaWQgQ0EwHhcNMDcwNzA1MDg0MjA3WhcN
-MTUwNzI3MDgzOTQ2WjBfMQswCQYDVQQGEwJOTDEXMBUGA1UEChMORGlnaU5vdGFy
-IEIuVi4xNzA1BgNVBAMTLkRpZ2lOb3RhciBQS0lvdmVyaGVpZCBDQSBPdmVyaGVp
-ZCBlbiBCZWRyaWp2ZW4wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDc
-vdKnTmoKuzuiheF/AK2+tDBomAfNoHrElM9x+Yo35FPrV3bMi+Zs/u6HVcg+uwQ5
-AKeAeKxbT370vbhUuHE7BzFJOZNUfCA7eSuPu2GQfbGs5h+QLp1FAalkLU3DL7nn
-UNVOKlyrdnY3Rtd57EKZ96LspIlw3Dgrh6aqJOadkiQbvvb91C8ZF3rmMgeUVAVT
-Q+lsvK9Hy7zL/b07RBKB8WtLu+20z6slTxjSzAL8o0+1QjPLWc0J3NNQ/aB2jKx+
-ZopC9q0ckvO2+xRG603XLzDgbe5bNr5EdLcgBVeFTegAGaL2DOauocBC36esgl3H
-aLcY5olLmmv6znn58yynAgMBAAGjggFQMIIBTDBIBgNVHSAEQTA/MD0GBFUdIAAw
-NTAzBggrBgEFBQcCARYnaHR0cDovL3d3dy5kaWdpbm90YXIubmwvY3BzL3BraW92
-ZXJoZWlkMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMIGABgNVHSME
-eTB3gBQLhtYPd6NosftkCcOIblwEHFfpPaFZpFcwVTELMAkGA1UEBhMCTkwxHjAc
-BgNVBAoTFVN0YWF0IGRlciBOZWRlcmxhbmRlbjEmMCQGA1UEAxMdU3RhYXQgZGVy
-IE5lZGVybGFuZGVuIFJvb3QgQ0GCBACYmnkwPQYDVR0fBDYwNDAyoDCgLoYsaHR0
-cDovL2NybC5wa2lvdmVyaGVpZC5ubC9Eb21PdkxhdGVzdENSTC5jcmwwHQYDVR0O
-BBYEFEwIyY128ZjHPt881y91DbF2eZfMMA0GCSqGSIb3DQEBBQUAA4IBAQAMlIca
-v03jheLu19hjeQ5Q38aEW9K72fUxCho1l3TfFPoqDz7toOMI9tVOW6+mriXiRWsi
-D7dUKH6S3o0UbNEc5W50BJy37zRERd/Jgx0ZH8Apad+J1T/CsFNt5U4X5HNhIxMm
-cUP9TFnLw98iqiEr2b+VERqKpOKrp11Lbyn1UtHk0hWxi/7wA8+nfemZhzizDXMU
-5HIs4c71rQZIZPrTKbmi2Lv01QulQERDjqC/zlqlUkxk0xcxYczopIro5Ij76eUv
-BjMzm5RmZrGrUDqhCYF0U1onuabSJc/Tw6f/ltAv6uAejVLpGBwgCkegllYOQJBR
-RKwa/fHuhR/3Qlpl
------END CERTIFICATE-----
-
 subject= /C=AU/O=Department of Education and Training/OU=ITD/CN=NSW-DET-ISS-CA1
 serial=61317BE300010000000E
 -----BEGIN CERTIFICATE-----
@@ -11533,36 +11336,6 @@
 fMdjjfQm4MWm+n4l9iQd4YCT37yyoUYJrlOdoVgWF+r+hcb+cj0AoEBnE0DxJM8=
 -----END CERTIFICATE-----
 
-subject= /C=CA/O=Syncrude Canada Ltd/CN=Syncrude Corporate Issuing CA 5
-serial=6102D2B7000100000008
------BEGIN CERTIFICATE-----
-MIIErDCCA5SgAwIBAgIKYQLStwABAAAACDANBgkqhkiG9w0BAQUFADBUMQswCQYD
-VQQGEwJDQTEcMBoGA1UEChMTU3luY3J1ZGUgQ2FuYWRhIEx0ZDEnMCUGA1UEAxMe
-U3luY3J1ZGUgQ29ycG9yYXRlIFBvbGljeSBDQSAyMB4XDTA5MDYxNzE1NDcxMloX
-DTE0MDYxNzE1NTcxMlowVTELMAkGA1UEBhMCQ0ExHDAaBgNVBAoTE1N5bmNydWRl
-IENhbmFkYSBMdGQxKDAmBgNVBAMTH1N5bmNydWRlIENvcnBvcmF0ZSBJc3N1aW5n
-IENBIDUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCfAkvTlz9Qxab2
-PpdhkXPnYb6jCwzSUQaOn/ANMKcR0jfgz5lbaI6oSd/ughvWyrrG8juiqTvSjEwN
-rmlZex8BsasXs16/LDmEtr35VZ1c8/4TWG3ENuug3d3QXxNRE7J7t3fgN9zCTdsW
-GAFWmxTG0X7AT/52mSw6xhvsrXkk0rHxvBuENqYr/9DujlY9wz70axftNeZbxh9T
-yJQhx8rSml3UKQtmzj7OMSZuLajEyL9ehyrLXd8HlVqrMam1jJCjuGnfjSRwDfu1
-Y461ANMUfyHVWaP8JbGWOCJL6zT/S9nS/2Jr+32wEvukR0Nm2GkOkqutJr4d936G
-uGd/b/OLAgMBAAGjggF9MIIBeTASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQW
-BBRbt4SQbpzxxr257M5P/q64WOtiVzALBgNVHQ8EBAMCAYYwEgYJKwYBBAGCNxUB
-BAUCAwEAATAjBgkrBgEEAYI3FQIEFgQUIFNOCg/fqVP+ZOYoYcf5JctQs90wGQYJ
-KwYBBAGCNxQCBAweCgBTAHUAYgBDAEEwHwYDVR0jBBgwFoAUVkqU3zAQGVkPxMNJ
-hYT1QYAjksUwVQYDVR0fBE4wTDBKoEigRoZEaHR0cDovL3BraS5zeW5jcnVkZS5j
-YS9jYS9TeW5jcnVkZSUyMENvcnBvcmF0ZSUyMFBvbGljeSUyMENBJTIwMi5jcmww
-awYIKwYBBQUHAQEEXzBdMFsGCCsGAQUFBzAChk9odHRwOi8vcGtpLnN5bmNydWRl
-LmNhL2NhL1BsYXRpbmlfU3luY3J1ZGUlMjBDb3Jwb3JhdGUlMjBQb2xpY3klMjBD
-QSUyMDIoMSkuY3J0MA0GCSqGSIb3DQEBBQUAA4IBAQDR6ogJviIW5Q50qWFlaOSV
-JU7C2o1czVyQ32cZbDCkyqF/nxxRC3uSgx1AGNCqsCBXXiSMaMlxV915rlniCS5S
-MoDr1m6AwQCffBCbnhnY5nJkStfS0lsRrxDI4Dw02XlAyGpILlpcrRZx/k8iiawt
-mKEvyAukS6vjVqhSXd8HeBRPvRunTPDBqcpLtM6Q6Q1Ziqozi+8aeY2hhnp1LD1/
-yoH7un++BOWe11zx7kLIoZvHbY05+FwkYrmYufZCuxpfzGDgfngEcjSYUhOxa4Xl
-5OkU/H9i4mIDyptqnBWvKOX7RCx60WXo91dYt0bvVt2SfK63+TtmdJulkHDF+XyN
------END CERTIFICATE-----
-
 subject= /C=NL/O=TERENA/CN=TERENA eScience SSL CA
 serial=11439EAF68210293F7C5011B5C17DCA0
 -----BEGIN CERTIFICATE-----
@@ -12090,37 +11863,6 @@
 XE39PR/QMudYjycmOYdG
 -----END CERTIFICATE-----
 
-subject= /O=Munich Re Group/CN=MRG Intermediate CA 01
-serial=072715E9
------BEGIN CERTIFICATE-----
-MIIEvDCCBCWgAwIBAgIEBycV6TANBgkqhkiG9w0BAQUFADB1MQswCQYDVQQGEwJV
-UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
-cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
-b2JhbCBSb290MB4XDTA3MDgyOTE0MjM0NFoXDTE3MDgyOTE0MjMwNFowOzEYMBYG
-A1UEChMPTXVuaWNoIFJlIEdyb3VwMR8wHQYDVQQDExZNUkcgSW50ZXJtZWRpYXRl
-IENBIDAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqlMj6jkMsZGD
-a7wcg+BAhSwgPHzwXo9z2IiuGIo81Uah0NkyV0G8KaUhCyUpvKIwCgYiUqOoaUDG
-+uN5YMlxANzemHLF4dJS8v3HTPmiSIem7iCJAZJZHfazanwXqHGzVh5p5O0rij9o
-N0KKAWdDHa73FCTxuwtcTLIbdq9LdlgNF5Z8PMXLGh88mr+g6EzAfS/Omd3Gwn8Q
-+64DIr3cHVfIXbAk95ERpO8Bpw6zP3SteqcElg9IOmzzKgT5s89irxLEkhUXfi7k
-rFohNLHAqVa9F7x75Tp2fq1d7tb6fYNdiGWG8dv0Sn7GXJmVWdBgCXAzDY+7Or37
-mUMxGOn8ZQIDAQABo4ICDTCCAgkwEgYDVR0TAQH/BAgwBgEB/wIBATCBpgYDVR0g
-BIGeMIGbME8GCysGAQQBwwwDChQDMEAwPgYIKwYBBQUHAgEWMmh0dHA6Ly9jZXJ0
-aWZpY2F0ZS5tdW5pY2hyZS5jb20vQ1BTL01SR1RydXN0Y2VudGVyMEgGCSsGAQQB
-sT4BADA7MDkGCCsGAQUFBwIBFi1odHRwOi8vY3liZXJ0cnVzdC5vbW5pcm9vdC5j
-b20vcmVwb3NpdG9yeS5jZm0wDgYDVR0PAQH/BAQDAgHGMIGJBgNVHSMEgYEwf6F5
-pHcwdTELMAkGA1UEBhMCVVMxGDAWBgNVBAoTD0dURSBDb3Jwb3JhdGlvbjEnMCUG
-A1UECxMeR1RFIEN5YmVyVHJ1c3QgU29sdXRpb25zLCBJbmMuMSMwIQYDVQQDExpH
-VEUgQ3liZXJUcnVzdCBHbG9iYWwgUm9vdIICAaUwgY4GA1UdHwSBhjCBgzBFoEOg
-QYY/aHR0cDovL2NlcnRpZmljYXRlLm11bmljaHJlLmNvbS9DRFAvR1RFQ3liZXJ0
-cnVzdEdsb2JhbFJvb3QuY3JsMDqgOKA2hjRodHRwOi8vd3d3LnB1YmxpYy10cnVz
-dC5jb20vY2dpLWJpbi9DUkwvMjAxOC9jZHAuY3JsMB0GA1UdDgQWBBQWRHNfK+0h
-F5HRjyfw7CuQA2qFyTANBgkqhkiG9w0BAQUFAAOBgQA4XQDqFDkjE+x6fXfbpnM1
-S0MgnN3P7poABrI/njR/7HjY0eIsF/9tZoKCKfVcAvw43OUv5IEpYqxAFgnWZjOW
-YTUfcApCKA158MirdPFvZ4RUQ2Dy9S89CeBOGyNtAgbxalWBgCHX+fyqUUcXySh5
-J8q0SvtPaKAw9jH90XK+Dg==
------END CERTIFICATE-----
-
 subject= /C=DE/O=Hochschule Offenburg/CN=Hochschule Offenburg CA
 serial=0D167ACB
 -----BEGIN CERTIFICATE-----
@@ -12493,37 +12235,6 @@
 CPMr3u00HUSe0gST9MsFFy0JLS1j7/YmC3s=
 -----END CERTIFICATE-----
 
-subject= /C=RO/O=GLOBE HOSTING CERTIFICATION AUTHORITY/CN=GLOBE SSL Domain Validated CA
-serial=0F6F10D9FB0CC82A4951B281E33F1DD6
------BEGIN CERTIFICATE-----
-MIIExzCCA6+gAwIBAgIQD28Q2fsMyCpJUbKB4z8d1jANBgkqhkiG9w0BAQUFADCB
-lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
-SGFyZHdhcmUwHhcNMDkwMjIzMDAwMDAwWhcNMjAwNTMwMTA0ODM4WjBlMQswCQYD
-VQQGEwJSTzEuMCwGA1UEChMlR0xPQkUgSE9TVElORyBDRVJUSUZJQ0FUSU9OIEFV
-VEhPUklUWTEmMCQGA1UEAxMdR0xPQkUgU1NMIERvbWFpbiBWYWxpZGF0ZWQgQ0Ew
-ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDJufiWc5XdGv9LKABC3dGr
-SjA0lD5ac5RJuj1QRkncmnL1fov1SKSlek4sc1eBNW9AR7U770XXtBZm7ct1GDTU
-1qV3qZ7z0wpE7G9IkkdHhDncjzCzESEIt11i9XjIzk187+dtNuu4J/7sNOC/gxX7
-aq2PCfgo9gHYGf0Ab2h/HsmNKHNoAaRyMcXBESfYHtbSwqkjDye4W9Q7j1z2JmxO
-SNWJy0lDwVdbW5KQpD8ACgj0x9tNPMHKPHj7wDdFFv/Vgz086ZzxSRycbG1+N7Nk
-gvX+89PGEnDjzQ2dOEsFvWIAUnJwNfWqKrC/kzZ/47qFByWyLxIN9Z1YXfiFANnP
-AgMBAAGjggE+MIIBOjAfBgNVHSMEGDAWgBShcl8mGyiYQ5VdBzfVhZadS9LDRTAd
-BgNVHQ4EFgQUH1SWcq09QOeWWfdJMVMJZvkNa3IwDgYDVR0PAQH/BAQDAgEGMBIG
-A1UdEwEB/wQIMAYBAf8CAQAwGAYDVR0gBBEwDzANBgsrBgEEAbIxAQICGzBEBgNV
-HR8EPTA7MDmgN6A1hjNodHRwOi8vY3JsLnVzZXJ0cnVzdC5jb20vVVROLVVTRVJG
-aXJzdC1IYXJkd2FyZS5jcmwwdAYIKwYBBQUHAQEEaDBmMD0GCCsGAQUFBzAChjFo
-dHRwOi8vY3J0LnVzZXJ0cnVzdC5jb20vVVROQWRkVHJ1c3RTZXJ2ZXJfQ0EuY3J0
-MCUGCCsGAQUFBzABhhlodHRwOi8vb2NzcC51c2VydHJ1c3QuY29tMA0GCSqGSIb3
-DQEBBQUAA4IBAQBnwhaw5L6328SVMS5D9yvMaO4onT1rRWnUtHUFJ9AjOj0JIj16
-2i+3RjAaw33oUQXXL99COXXT/4d5ZLMUUIy3QdmZJkxnDnIsn927En8R1+wsyJqW
-c0HA1qaGlhQBqLONchbTi0GtQvIr4Qffxa1yYGz/KS5l67ysWE6AM+JWQ+VYus4z
-6ylrbKTp3Bt9ckxChdSOo2JJYMhWWOVrpdKTYER+v3EI27ZbWTmtEUM25ZsPNZ5z
-EniiQRztzFQgWOkb3Osr9spfSwgf7zTAMo9v7VzTwUi5T0gvEbh2dFCTKr95TlGS
-74DauPMB3VZ32t1XH6nTZKqhqkyw0GMZqJ35
------END CERTIFICATE-----
-
 subject= /C=US/O=DREAMHOST CERTIFICATION AUTHORITY/CN=DREAMHOST SSL DOMAIN VALIDATED CA
 serial=7EE45145D65AD88691203A8CBF425508
 -----BEGIN CERTIFICATE-----
@@ -20903,6 +20614,39 @@
 vIWaIZ6WrpIw6aFTDOareLFcHt4QZ99MH8ORvPD1heHbVFtEcptINQ==
 -----END CERTIFICATE-----
 
+subject= /C=DE/ST=Bayern/L=Ansbach/O=Hochschule Ansbach/OU=Hochschulrechenzentrum/CN=HS Ansbach CA/emailAddress=ca@hs-ansbach.de
+serial=0FF522FE
+-----BEGIN CERTIFICATE-----
+MIIFNDCCBBygAwIBAgIED/Ui/jANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJE
+RTETMBEGA1UEChMKREZOLVZlcmVpbjEQMA4GA1UECxMHREZOLVBLSTEkMCIGA1UE
+AxMbREZOLVZlcmVpbiBQQ0EgR2xvYmFsIC0gRzAxMB4XDTEwMDQyNjE0MzgzOFoX
+DTE5MDYzMDAwMDAwMFowgacxCzAJBgNVBAYTAkRFMQ8wDQYDVQQIEwZCYXllcm4x
+EDAOBgNVBAcTB0Fuc2JhY2gxGzAZBgNVBAoTEkhvY2hzY2h1bGUgQW5zYmFjaDEf
+MB0GA1UECxMWSG9jaHNjaHVscmVjaGVuemVudHJ1bTEWMBQGA1UEAxMNSFMgQW5z
+YmFjaCBDQTEfMB0GCSqGSIb3DQEJARYQY2FAaHMtYW5zYmFjaC5kZTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBAL640/3OytiC+Yxp4RroSSrJY7SEcBcj
+5mZuIBG49XMP3zQJkJNjclm1JOtTcHi8wciPhFmlhQoLwKMvUHFBFMudUwoIl59k
+m69SwjtxLywl3hM8Qhr7JXIdLBcbSwfjMbbnfMIZhs8VNRNqDBcO22xMRBbwKmDq
+W9Jh0dgmcjRxrJPi/bKTUJrGzq9S71eybLd9+1S18P0ASsSwWoPqf7Dblprn5wrR
+dhxziWt5/k0ZGaeMWzHOqKhkoHnDUcHD88que3ecd4dPdCafrPAmQbM2ERe3HGO6
+t+taZU+0j4Ayz+bnWc1bc/xmj43h3mbL55UzBve6AjzeEscVijOR498CAwEAAaOC
+AbIwggGuMBIGA1UdEwEB/wQIMAYBAf8CAQEwCwYDVR0PBAQDAgEGMB0GA1UdDgQW
+BBTFNdVe/8R+j2keS3+n6fdGZxTpfjAfBgNVHSMEGDAWgBRJt8bP6D0ff+pEexMp
+9/EKcD7eZDAbBgNVHREEFDASgRBjYUBocy1hbnNiYWNoLmRlMIGIBgNVHR8EgYAw
+fjA9oDugOYY3aHR0cDovL2NkcDEucGNhLmRmbi5kZS9nbG9iYWwtcm9vdC1jYS9w
+dWIvY3JsL2NhY3JsLmNybDA9oDugOYY3aHR0cDovL2NkcDIucGNhLmRmbi5kZS9n
+bG9iYWwtcm9vdC1jYS9wdWIvY3JsL2NhY3JsLmNybDCBogYIKwYBBQUHAQEEgZUw
+gZIwRwYIKwYBBQUHMAKGO2h0dHA6Ly9jZHAxLnBjYS5kZm4uZGUvZ2xvYmFsLXJv
+b3QtY2EvcHViL2NhY2VydC9jYWNlcnQuY3J0MEcGCCsGAQUFBzAChjtodHRwOi8v
+Y2RwMi5wY2EuZGZuLmRlL2dsb2JhbC1yb290LWNhL3B1Yi9jYWNlcnQvY2FjZXJ0
+LmNydDANBgkqhkiG9w0BAQUFAAOCAQEA4IXI/sRjXgzooU8F4fnbBW3Qq9qxST/R
+fseUR7iY2SyLjPLJ+e4RpyHspRIeZ5CV0YWrQ13aeFlcE2eKBw/c9vpCc9l/Ptsa
+eRb99sBiGojjryGpuVsNGLoD/185FQetYt8nwa3DS3dVSlJ369SpwiENsz3rmAr3
+5Rvpvb5SSRP6km+IiPbbNaM+MV1vObL64GwdDTS03yZD/H5mkJtgz/liOAVF6699
+BjAh0CzkY/VqeVGIGER7QRJ1H+WxbtLeDQVozY4xTu/uDDw1WNX9JCBA71DXGq6R
+1/P2AaCYSjWvyksBoJLpALYRa3WayJvUFTLmLVyFrfX7DYl9KEK/5Q==
+-----END CERTIFICATE-----
+
 subject= /C=DE/O=Fachhochschule Schmalkalden/OU=Rechenzentrum/CN=Fachhochschule Schmalkalden CA - G01/emailAddress=pki@fh-schmalkalden.de
 serial=10400393
 -----BEGIN CERTIFICATE-----
@@ -22786,6 +22530,40 @@
 ddo+kGdNivk=
 -----END CERTIFICATE-----
 
+subject= /C=DE/ST=Berlin/L=Berlin/O=Helmholtz-Zentrum Berlin fuer Materialien und Energie GmbH/CN=HZB CA/emailAddress=hzbca@helmholtz-berlin.de
+serial=0E682896
+-----BEGIN CERTIFICATE-----
+MIIFRTCCBC2gAwIBAgIEDmgoljANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJE
+RTETMBEGA1UEChMKREZOLVZlcmVpbjEQMA4GA1UECxMHREZOLVBLSTEkMCIGA1UE
+AxMbREZOLVZlcmVpbiBQQ0EgR2xvYmFsIC0gRzAxMB4XDTA5MDYyOTExNTIzOVoX
+DTE5MDYzMDAwMDAwMFowga8xCzAJBgNVBAYTAkRFMQ8wDQYDVQQIEwZCZXJsaW4x
+DzANBgNVBAcTBkJlcmxpbjFDMEEGA1UEChM6SGVsbWhvbHR6LVplbnRydW0gQmVy
+bGluIGZ1ZXIgTWF0ZXJpYWxpZW4gdW5kIEVuZXJnaWUgR21iSDEPMA0GA1UEAxMG
+SFpCIENBMSgwJgYJKoZIhvcNAQkBFhloemJjYUBoZWxtaG9sdHotYmVybGluLmRl
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAl74HDhcDq2exs5nVVnVv
+6Lsomu3jgBzpHUfhr1AuH/iXTPCLkq06ogNhwT+7h/HEzKKFjquf+4RcQl/fG6jQ
+/2DigKDzQGM7A5pG13LGdSM6Asr9aljLlkqLcCReAAhU2VpEe1dprZbWh0am2lrl
+4n28L8vZPmcn/y4+VCc+n5u+EmsIGBTTQjjcjCIJlEROX7+AsGdB+PRl/gzyuuzL
+m7JxGwN5opiSH0UWVoLZ+X3rqZ8WgTl8wr2QIAX1ndSjDXkNehDbrkk/ucQlZDnl
+lkilsPRHuy1x8qyouNvAQwQ2fAwwbG5FkLFf8F9GP2vY1p3MbZLcNc+XE9QI7of4
+nQIDAQABo4IBuzCCAbcwEgYDVR0TAQH/BAgwBgEB/wIBATALBgNVHQ8EBAMCAQYw
+HQYDVR0OBBYEFF5CLLdRNgOKxAicgZS9vtvjS3zcMB8GA1UdIwQYMBaAFEm3xs/o
+PR9/6kR7Eyn38QpwPt5kMCQGA1UdEQQdMBuBGWh6YmNhQGhlbG1ob2x0ei1iZXJs
+aW4uZGUwgYgGA1UdHwSBgDB+MD2gO6A5hjdodHRwOi8vY2RwMS5wY2EuZGZuLmRl
+L2dsb2JhbC1yb290LWNhL3B1Yi9jcmwvY2FjcmwuY3JsMD2gO6A5hjdodHRwOi8v
+Y2RwMi5wY2EuZGZuLmRlL2dsb2JhbC1yb290LWNhL3B1Yi9jcmwvY2FjcmwuY3Js
+MIGiBggrBgEFBQcBAQSBlTCBkjBHBggrBgEFBQcwAoY7aHR0cDovL2NkcDEucGNh
+LmRmbi5kZS9nbG9iYWwtcm9vdC1jYS9wdWIvY2FjZXJ0L2NhY2VydC5jcnQwRwYI
+KwYBBQUHMAKGO2h0dHA6Ly9jZHAyLnBjYS5kZm4uZGUvZ2xvYmFsLXJvb3QtY2Ev
+cHViL2NhY2VydC9jYWNlcnQuY3J0MA0GCSqGSIb3DQEBBQUAA4IBAQCTOebEGrSq
+/sfXkELe1SzP80VBMStI1SLlNY+zCRDhHgYMpdqnTmkFV0cvj5hlAt6joUBtaTep
++8Y9frSgPeQ/n1BqDK6C2O8tigXK6aItQa9n7ILaZRqD6WlH/s7L/BzUObPb2rmA
+aYq6XzbIqBVQUG3yCJzY85xGUkillanWNR4dUHqORhPr82TOIMd3UsItj5GgmV6w
+nyC4JQjaG/P7X/aX1oQxGeLaFHH6mfAZnN43ATQcL5Di8BlfUeVYxsHabQV8YsCH
+T4ASTU/wpKddKw+DH8WfAoNYaJa0/yxOXl8gZBp6XNS+9OFde+VpX7qCflzaWDmJ
+ZcV1zZzg8qIJ
+-----END CERTIFICATE-----
+
 subject= /C=DE/ST=Bayern/L=Muenchen/O=Fraunhofer/OU=Fraunhofer Corporate PKI/CN=Fraunhofer Service CA - G01/emailAddress=pki@fraunhofer.de
 serial=13C33F11E20125
 -----BEGIN CERTIFICATE-----
@@ -23874,6 +23652,40 @@
 uNoAl2NGPPdhA73xIaCLO8RhEg==
 -----END CERTIFICATE-----
 
+subject= /C=DE/ST=Brandenburg/L=Frankfurt (Oder)/O=Europa-Universitaet Viadrina Frankfurt (Oder)/CN=Europa-Universitaet Viadrina CA/emailAddress=pki@europa-uni.de
+serial=10511FE5
+-----BEGIN CERTIFICATE-----
+MIIFUDCCBDigAwIBAgIEEFEf5TANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJE
+RTETMBEGA1UEChMKREZOLVZlcmVpbjEQMA4GA1UECxMHREZOLVBLSTEkMCIGA1UE
+AxMbREZOLVZlcmVpbiBQQ0EgR2xvYmFsIC0gRzAxMB4XDTEwMDcwNTA5MTM1N1oX
+DTE5MDYzMDAwMDAwMFowgcIxCzAJBgNVBAYTAkRFMRQwEgYDVQQIEwtCcmFuZGVu
+YnVyZzEZMBcGA1UEBxMQRnJhbmtmdXJ0IChPZGVyKTE2MDQGA1UEChMtRXVyb3Bh
+LVVuaXZlcnNpdGFldCBWaWFkcmluYSBGcmFua2Z1cnQgKE9kZXIpMSgwJgYDVQQD
+Ex9FdXJvcGEtVW5pdmVyc2l0YWV0IFZpYWRyaW5hIENBMSAwHgYJKoZIhvcNAQkB
+FhFwa2lAZXVyb3BhLXVuaS5kZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAMe1//UN9wDYeiaH21bX5loqfkwqzvirsnZN8cE0rX0yj6HvpspalNVt6LhK
+kvA0B8pt4MldTms+8JIYo4oe+nE6HcRQWGY06A57El3MgHjPuSqzHQ5TmBXij9PM
+jPhjN01P4dFOumbzF6xQX8pFArfnN6ekjT/saRSUwfvCCeV+EaoIs/L1pPOTAdKs
+zc0e1uFoMrYccTQlo41CkFORUJQQMnLQBRuzDnf9Wru59+rygMvl9IR7ICkWYjVV
+3BZVW89wIKmFHm1+dBameYSAGvteFI7iFv/oh7ifDr+FWJu4y2vutq20z3+1/i/p
+wcVeZiULJJrqc8EqJae7Y0Xi8cMCAwEAAaOCAbMwggGvMBIGA1UdEwEB/wQIMAYB
+Af8CAQEwCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBTIeu6QRZ+2BUqNDv51fhhtnGtW
+JzAfBgNVHSMEGDAWgBRJt8bP6D0ff+pEexMp9/EKcD7eZDAcBgNVHREEFTATgRFw
+a2lAZXVyb3BhLXVuaS5kZTCBiAYDVR0fBIGAMH4wPaA7oDmGN2h0dHA6Ly9jZHAx
+LnBjYS5kZm4uZGUvZ2xvYmFsLXJvb3QtY2EvcHViL2NybC9jYWNybC5jcmwwPaA7
+oDmGN2h0dHA6Ly9jZHAyLnBjYS5kZm4uZGUvZ2xvYmFsLXJvb3QtY2EvcHViL2Ny
+bC9jYWNybC5jcmwwgaIGCCsGAQUFBwEBBIGVMIGSMEcGCCsGAQUFBzAChjtodHRw
+Oi8vY2RwMS5wY2EuZGZuLmRlL2dsb2JhbC1yb290LWNhL3B1Yi9jYWNlcnQvY2Fj
+ZXJ0LmNydDBHBggrBgEFBQcwAoY7aHR0cDovL2NkcDIucGNhLmRmbi5kZS9nbG9i
+YWwtcm9vdC1jYS9wdWIvY2FjZXJ0L2NhY2VydC5jcnQwDQYJKoZIhvcNAQEFBQAD
+ggEBAFV4j6geWtBSRRJ2UO7TtOqmiqiAsZnJ3dGh98eyX73DRtmH11Ccu6NX3TsA
+b8ATcXKhLYQImBttRuFR/Df6Qr5s6KZlhaHp43lzfTTExmtQeaHtCiqjIeiiJ53l
+T4kr1lNPjNJV2BGON5yK1H2bIzVyBN4bZ3oYpz7Uukfcf1h+/GpERjuRzJDW9sNd
+ePmQqm486k9Q9xPIUXWiNAozIgmH2kDueeoyMoZ1EdFUCdXSs9IxMtE6qkq4o7LP
+zb9gjYJZ3Pqm6HOOXpdhpIZ7V/N/lS2Q/h9C5YJrR2+OKAubsRd+wg7v3TwQvDOH
+0KR0a8vzYPzxDc9goONtwkOHxhY=
+-----END CERTIFICATE-----
+
 subject= /CN=PKI FINMECCANICA/O=FINMECCANICA/OU=FINMECCANICA/C=IT
 serial=072740CD
 -----BEGIN CERTIFICATE-----
@@ -26177,41 +25989,6 @@
 MXA68Mb6ZdlkhGEmZYVBcOmkaKs+P+SggTofsK27BlpugAtNWjEy5JY=
 -----END CERTIFICATE-----
 
-subject= /O=Munich Re Group/CN=MRG Issuing CA 01
-serial=0413
------BEGIN CERTIFICATE-----
-MIIFmjCCBIKgAwIBAgICBBMwDQYJKoZIhvcNAQEFBQAwOzEYMBYGA1UEChMPTXVu
-aWNoIFJlIEdyb3VwMR8wHQYDVQQDExZNUkcgSW50ZXJtZWRpYXRlIENBIDAxMB4X
-DTEwMDUxMTEyNTcwMloXDTE1MDUxMTEyNTY1MlowNjEYMBYGA1UEChMPTXVuaWNo
-IFJlIEdyb3VwMRowGAYDVQQDExFNUkcgSXNzdWluZyBDQSAwMTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAI8IeuwVz25L+XaVxTYTwuuLJ87MSSkNLDPJ
-Cq/oyiiWcFZiXfkvN1qLrrceGXQJN85uE/XrvJtuIyh678NnPJgJVLxi6Gs8Jx+B
-GbVRqVaCu/4MCQowAJnRJ0tCc9CsMgmYuwwCPY7CdkR5nYQPPtvzWCLI0Cds5jvR
-wUWSkgHZSbVGbcvc/GOkYx56JXgZnicrHns0OCT2LbLH7nRIwSX8jml09ctOuiG5
-GWCN0PmZd3M7FxQhyChodwEsKQIr6ERm7jbsV6RddgauSyoFPyPHJtD2crtM7rT3
-TCM3Mmm90aP50DfzLtFuUNsUEQ7Z6I3vs/4mVyICuJYfDwOWW/kCAwEAAaOCAqsw
-ggKnMBIGA1UdEwEB/wQIMAYBAf8CAQAwWgYDVR0gBFMwUTBPBgsrBgEEAcMMAwoU
-AzBAMD4GCCsGAQUFBwIBFjJodHRwOi8vY2VydGlmaWNhdGUubXVuaWNocmUuY29t
-L0NQUy9NUkdUcnVzdGNlbnRlcjCB7wYIKwYBBQUHAQEEgeIwgd8wSgYIKwYBBQUH
-MAKGPmh0dHA6Ly9jZXJ0aWZpY2F0ZS5tdW5pY2hyZS5jb20vQUlBL01SR0ludGVy
-bWVkaWF0ZUNBMDEtMDEuY3J0MIGQBggrBgEFBQcwAoaBg2xkYXA6Ly9jZXJ0aWZp
-Y2F0ZS5tdW5pY2hyZS5jb20vQ049TVJHSW50ZXJtZWRpYXRlQ0EwMSxDTj1BSUEs
-Tz1NdW5pY2hSZUdyb3VwP2NBQ2VydGlmaWNhdGU/YmFzZT9vYmplY3RDbGFzcz1j
-ZXJ0aWZpY2F0aW9uQXV0aG9yaXR5MAsGA1UdDwQEAwIBxjAfBgNVHSMEGDAWgBQW
-RHNfK+0hF5HRjyfw7CuQA2qFyTCB9QYDVR0fBIHtMIHqMESgQqBAhj5odHRwOi8v
-Y2VydGlmaWNhdGUubXVuaWNocmUuY29tL0NEUC9NUkdJbnRlcm1lZGlhdGVDQTAx
-LTAxLmNybDCBoaCBnqCBm4aBmGxkYXA6Ly9jZXJ0aWZpY2F0ZS5tdW5pY2hyZS5j
-b20vY24lM2RNUkdJbnRlcm1lZGlhdGVDQTAxLTAxLGNuJTNkQ0RQLG8lM2RNdW5p
-Y2hSZUdyb3VwP2NlcnRpZmljYXRlUmV2b2NhdGlvbkxpc3Q/YmFzZT8ob2JqZWN0
-Q2xhc3M9Y1JMRGlzdHJpYnV0aW9uUG9pbnQpMB0GA1UdDgQWBBSYNdSKidBsAuuI
-SN66mwcM2pB8azANBgkqhkiG9w0BAQUFAAOCAQEAdRGaf18t0Ii2aMMncVZ6ISr0
-gYmDQkcZwFQgofQ3eMRL03uPGNufC/wtR+pKBwQPh9DQqZDPA+6+5kytKd8VS1T+
-Mejjb5ayA2btMVIBCi+YrFx8aiLVmKg/TfiKMuB3nmvOLYLRGms6bUBrnpu7eIiD
-snGPZpTd2iE6ibD3mVYRmDrvwn4XqSg3gOu/S6Ck9ktZtdx6JKgRYiYNA5vQSnpQ
-MTgmJsHaJqHi/VUAcpAVgCbmexckArLzfMDgjhtf0jSqNW7TlA9fzzshKl781gwj
-z0xQKbayCAz3NbZMSc3an+isrgJ9lNm+mu/FK5i+AFl7W/qZDcOTG2AdpW/ZkA==
------END CERTIFICATE-----
-
 subject= /C=PT/O=MULTICERT - Servi\xC3\xA7os de Certifica\xC3\xA7\xC3\xA3o Electr\xC3\xB3nica S.A./OU=Entidade de Certifica\xC3\xA7\xC3\xA3o Credenciada/CN=MULTICERT - Entidade de Certifica\xC3\xA7\xC3\xA3o 001
 serial=0727295D
 -----BEGIN CERTIFICATE-----
@@ -26247,42 +26024,6 @@
 geSy8IW45BwHlpVm5hwjWROJnIApiT4HZXLj5ZPfhbJvb1M0msAD0x0nElq2/fs=
 -----END CERTIFICATE-----
 
-subject= /C=US/O=Entrust/OU=Certification Authorities/CN=Entrust Managed Services Commercial Public Root CA
-serial=4C0E6212
------BEGIN CERTIFICATE-----
-MIIFozCCBIugAwIBAgIETA5iEjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
-RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
-bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
-IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
-ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw0xMDA2MzAxODM0NTRaFw0yMDA2
-MzAxOTA0NTRaMIGAMQswCQYDVQQGEwJVUzEQMA4GA1UEChMHRW50cnVzdDEiMCAG
-A1UECxMZQ2VydGlmaWNhdGlvbiBBdXRob3JpdGllczE7MDkGA1UEAxMyRW50cnVz
-dCBNYW5hZ2VkIFNlcnZpY2VzIENvbW1lcmNpYWwgUHVibGljIFJvb3QgQ0EwggEi
-MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDMXI+wxZoiWKTiB15DT/VyF8eT
-SWED/vatNS6/Y+s9kbGo2oI8Qlk/lsWn7lv9UeJmIVeu+Pp6iNTyQ09kOAyBBmAQ
-/yKq4PRHnsrPuxgkirXZ5t3AumSPVr62v2kHVce9S94LjCR2UIov0EMvs8/qq95K
-IGxGLTgdoeDVXI/5RZeAsGTquePKvbHest1FmPQaly2pRPdnlRHEmJQ2NkaR45Ae
-QvKKn8kzpYeCoBLyCsD0HbHzlIfPaujKqEcQZIbJUyD0y++KNajSPATWImCt2eXd
-eHEUcnmcHGq9ha1EEI02oW8zT2R0xhjdLoR5Ds+xg43iGyxLPdoliIxHVPhhAgMB
-AAGjggHtMIIB6TAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBATAz
-BggrBgEFBQcBAQQnMCUwIwYIKwYBBQUHMAGGF2h0dHA6Ly9vY3NwLmVudHJ1c3Qu
-bmV0MIIBGAYDVR0gBIIBDzCCAQswVwYNYIZIAYb6a4FIAwoCBDBGMEQGCCsGAQUF
-BwIBFjhodHRwOi8vY29tcHViLm1hbmFnZWQuZW50cnVzdC5jb20vQ1AvRU1TQ29t
-bWVyY2lhbENQLnBkZjBXBg1ghkgBhvprgUgDCgIFMEYwRAYIKwYBBQUHAgEWOGh0
-dHA6Ly9jb21wdWIubWFuYWdlZC5lbnRydXN0LmNvbS9DUC9FTVNDb21tZXJjaWFs
-Q1AucGRmMFcGDWCGSAGG+muBSAMKAgYwRjBEBggrBgEFBQcCARY4aHR0cDovL2Nv
-bXB1Yi5tYW5hZ2VkLmVudHJ1c3QuY29tL0NQL0VNU0NvbW1lcmNpYWxDUC5wZGYw
-MgYDVR0fBCswKTAnoCWgI4YhaHR0cDovL2NybC5lbnRydXN0Lm5ldC8yMDQ4Y2Eu
-Y3JsMB0GA1UdDgQWBBTAMkkFZmaDhxlpua6DJQYZiwXXdDAfBgNVHSMEGDAWgBRV
-5IHREYC+2Im5CKMx+aEkCRa5cDANBgkqhkiG9w0BAQUFAAOCAQEAD7NwMLoCbvFz
-WEObVh724gCE6XY+xB2NpOrYpu7dlh98kzn7upnY1BMhjvLHiWDMcxIIsar/XFNI
-7AePCF4PGpvfudBtB/U5umTxFVYgLLexxsN2cUOP5yd0mJb6k3wU6NOi8mPZNtSf
-ullUs7avzGHWx787no9d97XrIKKCMV8HwJjtZCt0OKFXVCYWMNMcFqVvBX2pbqgS
-lSBtZhzTIfi3MYkEOY9Zgrtsh+2hP22OJuskImNEHqlYbM//OqlCGQz+4sK6Ai/3
-HMfKFKJ/6Oyf8j48dO123nPRMQAy/oBHEITGQQUK0hdDZyglGOzTHGWH2Ae33vrz
-s+8T9rG8Gw==
------END CERTIFICATE-----
-
 subject= /C=DE/O=T-Systems Enterprise Services GmbH/OU=Trust Center Deutsche Telekom/CN=Shared Business CA
 serial=010A
 -----BEGIN CERTIFICATE-----
@@ -26499,42 +26240,6 @@
 qQTFdYgB2nhS+DzqxUhXUiRPZl3Oj4ZxwyOg9oez0g==
 -----END CERTIFICATE-----
 
-subject= /C=DE/O=Berufsakademie Sachsen - Staatliche Studienakademie Dresden/OU=Rechenzentrum/CN=BA Sachsen - Staatliche Studienakademie Dresden CA - G01/emailAddress=rechenzentrum@ba-dresden.de
-serial=14DC2F29C82307
------BEGIN CERTIFICATE-----
-MIIFwDCCBKigAwIBAgIHFNwvKcgjBzANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQG
-EwJERTETMBEGA1UEChMKREZOLVZlcmVpbjEQMA4GA1UECxMHREZOLVBLSTEkMCIG
-A1UEAxMbREZOLVZlcmVpbiBQQ0EgR2xvYmFsIC0gRzAxMB4XDTEyMTIwMzE0MDUx
-NFoXDTE5MDYzMDAwMDAwMFowgdoxCzAJBgNVBAYTAkRFMUQwQgYDVQQKEztCZXJ1
-ZnNha2FkZW1pZSBTYWNoc2VuIC0gU3RhYXRsaWNoZSBTdHVkaWVuYWthZGVtaWUg
-RHJlc2RlbjEWMBQGA1UECxMNUmVjaGVuemVudHJ1bTFBMD8GA1UEAxM4QkEgU2Fj
-aHNlbiAtIFN0YWF0bGljaGUgU3R1ZGllbmFrYWRlbWllIERyZXNkZW4gQ0EgLSBH
-MDExKjAoBgkqhkiG9w0BCQEWG3JlY2hlbnplbnRydW1AYmEtZHJlc2Rlbi5kZTCC
-ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKukHfbYWeGkqEh5TOB8Hdpb
-tNoLq09LKT3kR01GOOWYcUN23RH4OsAlRL3ZBKCD3c30MxE2QTsYNy4pS9jdjn5r
-sUe6kyGn+WW59SPjPbbVUOQAjO+AD5qXaOFdAARLxgb4cPEBt8wppaPBsyABBvp2
-qxRc5ItV+5Z3CB3WE+r8IVctId07OHBF98ikQhLlTzV1bAD9nds1HjkResX0Dsja
-IzYEjqMzHf2Z56uqPJdfYZ2OIEwl7HGT1dTW40QEXdbSpsJT9napsuDl8Ud+Qvhw
-knLkEpKFUNVTRAM/b51Zc2xXSYnUj0Ks/6+t64vi5nuad6DS4/FgStpapM6R3j8C
-AwEAAaOCAggwggIEMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYDVR0PAQH/BAQDAgEG
-MBEGA1UdIAQKMAgwBgYEVR0gADAdBgNVHQ4EFgQUwSb3v9+hEGCDdh47G60QzT+A
-fP8wHwYDVR0jBBgwFoAUSbfGz+g9H3/qRHsTKffxCnA+3mQwJgYDVR0RBB8wHYEb
-cmVjaGVuemVudHJ1bUBiYS1kcmVzZGVuLmRlMIGIBgNVHR8EgYAwfjA9oDugOYY3
-aHR0cDovL2NkcDEucGNhLmRmbi5kZS9nbG9iYWwtcm9vdC1jYS9wdWIvY3JsL2Nh
-Y3JsLmNybDA9oDugOYY3aHR0cDovL2NkcDIucGNhLmRmbi5kZS9nbG9iYWwtcm9v
-dC1jYS9wdWIvY3JsL2NhY3JsLmNybDCB1wYIKwYBBQUHAQEEgcowgccwMwYIKwYB
-BQUHMAGGJ2h0dHA6Ly9vY3NwLnBjYS5kZm4uZGUvT0NTUC1TZXJ2ZXIvT0NTUDBH
-BggrBgEFBQcwAoY7aHR0cDovL2NkcDEucGNhLmRmbi5kZS9nbG9iYWwtcm9vdC1j
-YS9wdWIvY2FjZXJ0L2NhY2VydC5jcnQwRwYIKwYBBQUHMAKGO2h0dHA6Ly9jZHAy
-LnBjYS5kZm4uZGUvZ2xvYmFsLXJvb3QtY2EvcHViL2NhY2VydC9jYWNlcnQuY3J0
-MA0GCSqGSIb3DQEBBQUAA4IBAQAYkPYZFKeKEXzWcMY59GYTDQEcoSmsAO8adDzr
-T72xZh19JQBGCFCYoFqaQDAEhZhVSKIXt0qITWbCGFMVwosAq0pgiGE8xA06LCH1
-ySITmHJiTYJlbK8OoECVmPXLuSqnty3KOSyfY2ExL9yF8HplggVnS8m284P5iOVO
-VV8dzi14YL3FbyNahm8hmx32fX3q2nwmlUk61eguWj48Hh7GGiKVlU8eABM9yDHb
-kW+YMnoalFyt2MUwhGPWkTOJAwtSxCueailTaPvEOgSljuSiu4R+BIaERD9NUnX7
-RMP2CIo9VtL1hLKAEcJAvEqysYnz5atANtU7DgRNbl5HJJ0Q
------END CERTIFICATE-----
-
 subject= /C=US/ST=GA/L=Atlanta/O=Southern Company Services, Inc./OU=Southern Company Services, Inc./CN=Southern Company External Issuing CA 1
 serial=1A407234000000000005
 -----BEGIN CERTIFICATE-----
@@ -27577,6 +27282,44 @@
 17j3ynVWWto+KwhLtVh4Vq4UNMiZo4gwJFO6UzKoYZ4lpwAT
 -----END CERTIFICATE-----
 
+subject= /C=US/ST=Texas/O=ATT Services Inc/OU=ATT Wi-Fi Services/CN=AWS Corporate CA G2
+serial=18D7E6B80EFA544B
+-----BEGIN CERTIFICATE-----
+MIIGITCCBQmgAwIBAgIIGNfmuA76VEswDQYJKoZIhvcNAQEFBQAwgZAxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIEwVUZXhhczEZMBcGA1UEChMQQVRUIFNlcnZpY2VzIElu
+YzEbMBkGA1UECxMSQVRUIFdpLUZpIFNlcnZpY2VzMTkwNwYDVQQDEzBBVFQgV2kt
+RmkgU2VydmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgRzIwHhcNMTMw
+NjA1MTgyNzU3WhcNMTgwNTMwMDAwMDAwWjBzMQswCQYDVQQGEwJVUzEOMAwGA1UE
+CBMFVGV4YXMxGTAXBgNVBAoTEEFUVCBTZXJ2aWNlcyBJbmMxGzAZBgNVBAsTEkFU
+VCBXaS1GaSBTZXJ2aWNlczEcMBoGA1UEAxMTQVdTIENvcnBvcmF0ZSBDQSBHMjCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJeLnLlXYOCPIryXxZ3WQLFU
+lLwbnOveZgTlmALqGvbSQ7SlukksydSvqlje14Mtl/7dDoGtiUN3YmWsBSblTMPd
+4iq2yY6CL/tMxxdztx9bpHc+XoSIeMR5aOScZhA5xgnczmXvbG3qQFTGAQNKA0To
+dIgtAX2XeGz0V12b1lH9rIwmcbGPQvR77IELnXqmv731V7k72Q4nWgK7h3C7K5rW
+NjNrF1RjKsIMrNe908ENtQkgaHCagVdP+G6B7l/aUxFNNNMWs05tw0BNF/iC7Nl6
+KKNZol1P7Jgl0gZpXbQzbW8ppDIPn+llhUe3Uu57s6fIC0GK6vb7q5Nftqx9yy8C
+AwEAAaOCApkwggKVMHAGCCsGAQUFBwEBBGQwYjA1BggrBgEFBQcwAoYpaHR0cDov
+L2NybC1iLnBraS53YXlwb3J0Lm5ldC9yb290Y2FnMi5jcnQwKQYIKwYBBQUHMAGG
+HWh0dHA6Ly9vY3NwLWIucGtpLndheXBvcnQubmV0MB0GA1UdDgQWBBR66UL8/8Nh
+qJNgEIRehegOyYBy2jASBgNVHRMBAf8ECDAGAQH/AgEAMB8GA1UdIwQYMBaAFPPT
+x14sRSZ+/ebktJS4BA85OxDeMIHjBgNVHSAEgdswgdgwDQYLKwYBBAGjSIN9AQMw
+gcYGCysGAQQBo0iDfQEBMIG2MIGABggrBgEFBQcCAjB0HnIAQwBvAHAAeQByAGkA
+ZwBoAHQAIAAoAGMAKQAgADIAMAAxADMAIABBAFQAVAAgAFcAaQAtAEYAaQAgAFMA
+ZQByAHYAaQBjAGUAcwAgAEEAbABsACAAUgBpAGcAaAB0AHMAIABSAGUAcwBlAHIA
+dgBlAGQwMQYIKwYBBQUHAgEWJWh0dHA6Ly9jcmwtYi5wa2kud2F5cG9ydC5uZXQv
+Y3BzLmh0bWwwgdYGA1UdHwSBzjCByzCByKAtoCuGKWh0dHA6Ly9jcmwtYi5wa2ku
+d2F5cG9ydC5uZXQvcm9vdGNhZzIuY3JsooGWpIGTMIGQMQswCQYDVQQGEwJVUzEO
+MAwGA1UECAwFVGV4YXMxGTAXBgNVBAoMEEFUVCBTZXJ2aWNlcyBJbmMxGzAZBgNV
+BAsMEkFUVCBXaS1GaSBTZXJ2aWNlczE5MDcGA1UEAwwwQVRUIFdpLUZpIFNlcnZp
+Y2VzIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IEcyMA4GA1UdDwEB/wQEAwIB
+BjANBgkqhkiG9w0BAQUFAAOCAQEAZvJm7QmMmXYf9DCfru5Bggft9VgsvIS/ARj8
+NJGMVX5HWtqUxHxC1Hn62MkwOYL7C1OVplFbfHCVtKncZCmUP/t2dtwEvzrmFROV
+YBlOuz7I4CkX+8GD7HTaZGwah6ldEJYxmQuPE4ePrGSkHw8wYvEVQmKda7+qB4Mb
+WVJ818So9NIo/z8cUnTKjgYcnPd1R1DvTRdyUWuBPt8sOkaF7C2aUasuCsAVKgaE
++hLTyY/iQyz8pVRdy+JFVqpxomkmDUo55ia8VV+j4DOLUVJqH+l91ykv3sOOlhAl
+RyjdMdAlgERz3IpbVdfMudMBux6X4YjVm6gz4TXSAtKBmkT6Ww==
+-----END CERTIFICATE-----
+
 subject= /C=HU/L=Budapest/O=NetLock Kft./OU=Tan\xC3\xBAs\xC3\xADtv\xC3\xA1nykiad\xC3\xB3k (Certification Services)/CN=NetLock K\xC3\xB6zjegyz\xC5\x91i (Class A) Tan\xC3\xBAs\xC3\xADtv\xC3\xA1nykiad\xC3\xB3
 serial=49412CE40024
 -----BEGIN CERTIFICATE-----
@@ -27615,6 +27358,44 @@
 9Qj/kjLzCNjKasB9OibwkeEdgtFI0qGtMbwcBXedoMaszxAYU5lnvA==
 -----END CERTIFICATE-----
 
+subject= /O=ZF/OU=ZF IT/CN=ZF CA 01
+serial=61D54AA9000000000002
+-----BEGIN CERTIFICATE-----
+MIIGJDCCBQygAwIBAgIKYdVKqQAAAAAAAjANBgkqhkiG9w0BAQUFADAyMQswCQYD
+VQQKDAJaRjEOMAwGA1UECwwFWkYgSVQxEzARBgNVBAMMClpGIFJvb3QgQ0EwHhcN
+MDkwMzAzMTMzNTQzWhcNMTcwMzAzMTM0NTQzWjAwMQswCQYDVQQKDAJaRjEOMAwG
+A1UECwwFWkYgSVQxETAPBgNVBAMMCFpGIENBIDAxMIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAp/moOhBdkhl5G2krjNtIfGRF6qf+bChj5m7KLcexVHt7
+1/bZRWtZbWAo8po8KZci3c6wM6k4TnqkbODrI1txABeJRLnv/DdUgwy5fjI1+eX4
+rYK6QIkMVCssN8h3oMTYJVTqhPoy7mCPY7BYmfCW/GfvZ/lMMPDUlTAlcfphy+PE
+ErvJYZgqVM2jzKxermtI7b2sGpLBLUx+bxzL77P+4bU017blI/mpHG1//nFTse47
+fEnAbJq+IsaK1LozaShpOdZKaCPtQLPTsTe3G3gjXgkhW/JVe8i5S6t7d6NWW8s8
+Nu0e+6yfTJnljtAgxbWv1oPwDD4+uK8am+BLq6618wIDAQABo4IDPDCCAzgwEgYD
+VR0TAQH/BAgwBgEB/wIBATAdBgNVHQ4EFgQUUAV2xXz5lroJh+HLUVhls5ITYBAw
+DgYDVR0PAQH/BAQDAgEGMIHwBgNVHSAEgegwgeUwgeIGDCsGAQQB5mcBAQECATCB
+0TAoBggrBgEFBQcCARYcaHR0cDovL3BraS56Zi13b3JsZC5jb20vY3BzLzCBpAYI
+KwYBBQUHAgIwgZcegZQAWgBGACAARgByAGkAZQBkAHIAaQBjAGgAcwBoAGEAZgBl
+AG4AIABDAGUAcgB0AGkAZgBpAGMAYQB0AGUAIABQAG8AbABpAGMAeQAgAGEAbgBk
+ACAAQwBlAHIAdABpAGYAaQBjAGEAdABpAG8AbgAgAFAAcgBhAGMAdABpAGMAZQAg
+AFMAdABhAHQAZQBtAGUAbgB0MB8GA1UdIwQYMBaAFPd/D+5vnJpeEyv73OfKH13n
+ayPEMIHoBgNVHR8EgeAwgd0wgdqggdeggdSGJGh0dHA6Ly9wa2kuemYtd29ybGQu
+Y29tL1pGUm9vdENBLmNybIaBq2xkYXA6Ly8vQ049WkYlMjBSb290JTIwQ0EsQ049
+Q0RQLENOPVB1YmxpYyUyMEtleSUyMFNlcnZpY2VzLENOPVNlcnZpY2VzLGNuPWNv
+bmZpZ3VyYXRpb24sZGM9emYtd29ybGQsZGM9Y29tP2NlcnRpZmljYXRlUmV2b2Nh
+dGlvbkxpc3Q/YmFzZT9vYmplY3RDbGFzcz1jUkxEaXN0cmlidXRpb25Qb2ludDCB
+8wYIKwYBBQUHAQEEgeYwgeMwMAYIKwYBBQUHMAKGJGh0dHA6Ly9wa2kuemYtd29y
+bGQuY29tL1pGUm9vdENBLmNydDCBrgYIKwYBBQUHMAKGgaFsZGFwOi8vL0NOPVpG
+JTIwUm9vdCUyMENBLENOPUFJQSxDTj1QdWJsaWMlMjBLZXklMjBTZXJ2aWNlcyxD
+Tj1TZXJ2aWNlcyxjbj1jb25maWd1cmF0aW9uLGRjPXpmLXdvcmxkLGRjPWNvbT9j
+QUNlcnRpZmljYXRlP2Jhc2U/b2JqZWN0Q2xhc3M9Y2VydGlmaWNhdGlvbkF1dGhv
+cml0eTANBgkqhkiG9w0BAQUFAAOCAQEAWU53o37C+FbMOAtciHs3W5IfHHJzAmmK
+aCIupqurKpFvPAYUERnfu5iHuzpSPGQVQApgYzuNXk2q1nbXjoLdZgdS2ZJUMi/u
+0pFen0F0zwp64olMrpGbJrO+CFj59lXdNqTtEr08L1qHf+Si+CP6wuYLpHhIt70C
+l3gxhtbH6X7SRouw1BzfyN4bUfzzG+7PZR0R9lCSv793DEBo7xO5ahvMwq/sbPvG
+8sGJtAtHHR4TH/veSX0NMVts9vQCGVAVCZ1d97aOhBgFLmBG7QRUG6Z5zI4Sd3rA
+Cd8watuxtx4QU8/jQalsiWsTK63A718MTwJFgQzqzdUg8RZpWO00mA==
+-----END CERTIFICATE-----
+
 subject= /C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert Global CA
 serial=4286ABA0
 -----BEGIN CERTIFICATE-----
@@ -29980,49 +29761,6 @@
 QNCDg8bQkiXvVCKXt4cNimgdCwucmWcUIryqAUvb0m++mdFDcA==
 -----END CERTIFICATE-----
 
-subject= /C=US/O=Entrust/OU=Certification Authorities/CN=Entrust Education Shared Service Provider
-serial=4C1FA387
------BEGIN CERTIFICATE-----
-MIIHETCCBfmgAwIBAgIETB+jhzANBgkqhkiG9w0BAQUFADCBgDELMAkGA1UEBhMC
-VVMxEDAOBgNVBAoTB0VudHJ1c3QxIjAgBgNVBAsTGUNlcnRpZmljYXRpb24gQXV0
-aG9yaXRpZXMxOzA5BgNVBAMTMkVudHJ1c3QgTWFuYWdlZCBTZXJ2aWNlcyBDb21t
-ZXJjaWFsIFB1YmxpYyBSb290IENBMB4XDTEwMDcwNzIwMTQ1NFoXDTMwMDYwNzIw
-NDQ1NFowdzELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VudHJ1c3QxIjAgBgNVBAsT
-GUNlcnRpZmljYXRpb24gQXV0aG9yaXRpZXMxMjAwBgNVBAMTKUVudHJ1c3QgRWR1
-Y2F0aW9uIFNoYXJlZCBTZXJ2aWNlIFByb3ZpZGVyMIIBIjANBgkqhkiG9w0BAQEF
-AAOCAQ8AMIIBCgKCAQEA61FCE8DNTfaRvSVAPhlu+vPjPX+4w5egBREnz/8ObdJA
-W64b6UPDITjOSTzma2aomBsawDZxA6K/vXz1UEAfGC86RzfbZGLCLlMoSs8CT+1T
-1rLZmd7VLigN4yIen0qmB17Q5yMoTt/Ho9mYH+LRBapW+boLbRyMVRyNg6zxAfv2
-WHXTDRtS69Hz+OTkUnxKzZzEOXYIgHzbqMJlbZIqxDUuVFIkgn0A5+fp+v+bloEc
-ISioCSsjTMr4JPc5O0xNHOga/KH1a/ahZVU6OWlR0Pke3G4WsOamzhnZkATf4oKl
-fQbXzeGEQrMFEgO/d4LVa/wnaYb+z5o1GpyAuZYVBwIDAQABo4IDmTCCA5UwDgYD
-VR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQAwPAYDVR0gBDUwMzAPBg1g
-hkgBhvprgUgDCgIEMA8GDWCGSAGG+muBSAMKAgUwDwYNYIZIAYb6a4FIAwoCBjCC
-AT8GCCsGAQUFBwEBBIIBMTCCAS0wVQYIKwYBBQUHMAKGSWh0dHA6Ly9jb21wdWJ3
-ZWIubWFuYWdlZC5lbnRydXN0LmNvbS9BSUEvQ2VydHNJc3N1ZWRUb0VNU0NvbVB1
-YlJvb3RDQS5wN2MwgdMGCCsGAQUFBzAChoHGbGRhcDovL2NvbXB1YnNoYWQubWFu
-YWdlZC5lbnRydXN0LmNvbS9vdT1FbnRydXN0JTIwTWFuYWdlZCUyMFNlcnZpY2Vz
-JTIwQ29tbWVyY2lhbCUyMFB1YmxpYyUyMFJvb3QlMjBDQSxvdT1DZXJ0aWZpY2F0
-aW9uJTIwQXV0aG9yaXRpZXMsbz1FbnRydXN0LGM9VVM/Y0FDZXJ0aWZpY2F0ZTti
-aW5hcnksY3Jvc3NDZXJ0aWZpY2F0ZVBhaXI7YmluYXJ5MIIBrAYDVR0fBIIBozCC
-AZ8wgf6ggfuggfiGPWh0dHA6Ly9jb21wdWJ3ZWIubWFuYWdlZC5lbnRydXN0LmNv
-bS9DUkxzL0VNU0NvbVB1YlJvb3RDQS5jcmyGgbZsZGFwOi8vY29tcHVic2hhZC5t
-YW5hZ2VkLmVudHJ1c3QuY29tL2NuPUVudHJ1c3QlMjBNYW5hZ2VkJTIwU2Vydmlj
-ZXMlMjBDb21tZXJjaWFsJTIwUHVibGljJTIwUm9vdCUyMENBLG91PUNlcnRpZmlj
-YXRpb24lMjBBdXRob3JpdGllcyxvPUVudHJ1c3QsYz1VUz9jZXJ0aWZpY2F0ZVJl
-dm9jYXRpb25MaXN0O2JpbmFyeTCBm6CBmKCBlaSBkjCBjzELMAkGA1UEBhMCVVMx
-EDAOBgNVBAoTB0VudHJ1c3QxIjAgBgNVBAsTGUNlcnRpZmljYXRpb24gQXV0aG9y
-aXRpZXMxOzA5BgNVBAMTMkVudHJ1c3QgTWFuYWdlZCBTZXJ2aWNlcyBDb21tZXJj
-aWFsIFB1YmxpYyBSb290IENBMQ0wCwYDVQQDEwRDUkwxMB8GA1UdIwQYMBaAFMAy
-SQVmZoOHGWm5roMlBhmLBdd0MB0GA1UdDgQWBBQi3aL6WE6DPlOLAYATFjXUgRW2
-fzANBgkqhkiG9w0BAQUFAAOCAQEAdDiRYUbRjCUKiNQUujn8G1vGvnWo1kvnYx/l
-BUL07T1vthkTVlwBQmVjF6uEo9XrrFACGuj1yH9VRgMq7X+pMBfk4ZEfmyrmUE7Z
-Fq0Hu0TIS5jdLpJZcKwly/KLt5Ss+i/k6OjQawzKao1t1T5TL97cjnx1pDH0HNUY
-gZeDyIDG37twbg22Jd4I5dd+M96YL87G7Mf2TpIS/XrO2Wd84UElWzmMjWIoK0oN
-J3vgr+TcKC8ErzMS+AuNfMuBZL+lSoi/yOOEAUS7Sl/GxeSmfr05FPenD3vb6r4D
-auDyWKXJ30ojO9ymLTuiuiFV5edD7rs0ixxRGA0MO18VFaurdw==
------END CERTIFICATE-----
-
 subject= /C=CH/O=Helsana Gruppe/CN=Helsana Gruppe Service ICA 01
 serial=F8B20979B0014ABEF790D86B70E61B
 -----BEGIN CERTIFICATE-----
@@ -30238,6 +29976,50 @@
 +RTHUyFODEAEyyIV9kGrTkTTIqwQTq+sKKtk3IkqkyUEUWEnHBDiwL0rr4xjRQ==
 -----END CERTIFICATE-----
 
+subject= /C=US/ST=Texas/O=ATT Services Inc/OU=ATT Wi-Fi Services/CN=ATT Wi-Fi Services Root Certificate Authority G2
+serial=5CD7D896BAD5C97711BC14CF0ED35F2062
+-----BEGIN CERTIFICATE-----
+MIIHJTCCBg2gAwIBAgIRXNfYlrrVyXcRvBTPDtNfIGIwDQYJKoZIhvcNAQEFBQAw
+XDELMAkGA1UEBhMCQkUxFTATBgNVBAsTDFRydXN0ZWQgUm9vdDEZMBcGA1UEChMQ
+R2xvYmFsU2lnbiBudi1zYTEbMBkGA1UEAxMSVHJ1c3RlZCBSb290IENBIEcyMB4X
+DTEzMDUzMDAwMDAwMFoXDTE4MDUzMDAwMDAwMFowgZAxCzAJBgNVBAYTAlVTMQ4w
+DAYDVQQIEwVUZXhhczEZMBcGA1UEChMQQVRUIFNlcnZpY2VzIEluYzEbMBkGA1UE
+CxMSQVRUIFdpLUZpIFNlcnZpY2VzMTkwNwYDVQQDEzBBVFQgV2ktRmkgU2Vydmlj
+ZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgRzIwggEiMA0GCSqGSIb3DQEB
+AQUAA4IBDwAwggEKAoIBAQCDh9LO56ZXCaAKXdO/ZiuCfrKLwjJoYTZ9xJbPKmR+
+p5w/Zzw+UG8zdRaOgXBnXDcHvdTUcNcmOzglPrS2Xs+aiUWgNd4VgzafIofq/shP
+6Gxnquy8qdqnpDrrudUxTwgVisuSG/yiXsZva6OOmkyrR6N1Bu25+tb0oSnqPeGM
+5YXPjjWBIJtoRlUPoDgHr29Prv1/mLZuBqgUzFuN3Uynx1pN+hf97HfUDaHo/zMB
+FBC8gjjv77zOjBEK/P5VpVunN9a7sl+FBvaW+yQy9FG5TR0narXSwBJLijPgxUU9
+2TjW4+8oMnfVcu6ZBmqwBUNNorFfIpLTJqwPXJFvF4UXAgMBAAGjggOrMIIDpzAO
+BgNVHQ8BAf8EBAMCAQYwggELBgNVHSAEggECMIH/MHEGCisGAQQBoDIBPAEwYzAy
+BggrBgEFBQcCARYmaHR0cHM6Ly93d3cuZ2xvYmFsc2lnbi5jb20vcmVwb3NpdG9y
+eS8wLQYIKwYBBQUHAgIwIQwfR2xvYmFsU2lnbiBUcnVzdGVkIFJvb3QgUHJvZ3Jh
+bTCBiQYLKwYBBAGjSIN9AQEwejAvBggrBgEFBQcCARYjaHR0cDovL2NybC5wa2ku
+d2F5cG9ydC5uZXQvY3BzLmh0bWwwRwYIKwYBBQUHAgIwOww5Q29weXJpZ2h0IChj
+KSAyMDEzIEFUVCBXaS1GaSBTZXJ2aWNlcyBBbGwgUmlnaHRzIFJlc2VydmVkMBIG
+A1UdEwEB/wQIMAYBAf8CAQEwggFLBgNVHR4EggFCMIIBPqCCAQgwDYILd2F5cG9y
+dC5uZXQwDYILYXR0d2lmaS5jb20wEIIOc3VwZXJjbGljay5uZXQwEIIOc3VwZXJj
+bGljay5jb20wDYELd2F5cG9ydC5uZXQwDoEMLndheXBvcnQubmV0MA2BC2F0dHdp
+ZmkuY29tMA6BDC5hdHR3aWZpLmNvbTAQgQ5zdXBlcmNsaWNrLm5ldDARgQ8uc3Vw
+ZXJjbGljay5uZXQwEIEOc3VwZXJjbGljay5jb20wEYEPLnN1cGVyY2xpY2suY29t
+MDykOjA4MQswCQYDVQQGEwJVUzEOMAwGA1UECBMFVGV4YXMxGTAXBgNVBAoTEEFU
+VCBTZXJ2aWNlcyBJbmOhMDAKhwgAAAAAAAAAADAihyAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAADAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIw
+PQYDVR0fBDYwNDAyoDCgLoYsaHR0cDovL2NybC5nbG9iYWxzaWduLmNvbS9ncy90
+cnVzdHJvb3RnMi5jcmwwgYQGCCsGAQUFBwEBBHgwdjAzBggrBgEFBQcwAYYnaHR0
+cDovL29jc3AyLmdsb2JhbHNpZ24uY29tL3RydXN0cm9vdGcyMD8GCCsGAQUFBzAC
+hjNodHRwOi8vc2VjdXJlLmdsb2JhbHNpZ24uY29tL2NhY2VydC90cnVzdHJvb3Rn
+Mi5jcnQwHQYDVR0OBBYEFPPTx14sRSZ+/ebktJS4BA85OxDeMB8GA1UdIwQYMBaA
+FBT25YsxtkWASkxt/MKHico2w5BiMA0GCSqGSIb3DQEBBQUAA4IBAQCF3mZKOjut
+iscy/y3TgWkdHN7lHofmM/40gB7PyPiTOBKbQsSaSYuYr1Ls1xDEROpX5qWlxFMV
+6+o9irKf8pAaA7q3yInNiCb2o/1BPHAB4QOZM/r2sZLtPPkDxSi7GNglj2wTEnD6
+OB6yyMlgUTpDhk8n760DWFLMr28D23s72vK850AN5tmMNi7qAalmyiZBcVeE4Dik
+E94FxMQKec9f447ezNiObrxPUCzUaN+2qGGACwN08/8JShOgV5YLy2IJtBj7B9KT
+F1DP/ltQA86fGWUena2hSQzAPfwf6aTvLWz6DPUNuy3KNiJbzuvET/d4zT/MzqjP
+TwsUSW6g5/Fg
+-----END CERTIFICATE-----
+
 subject= /C=FR/O=Ministere education nationale (MENESR)/OU=110 043 015/CN=AC Infrastructures
 serial=269C1B8AB1DACF0ECB7ABC4CB8B9C1B4
 -----BEGIN CERTIFICATE-----
@@ -31199,29 +30981,6 @@
 G0pID0nsP9iH2xyG+8F+Fxcxt7ve0T2YGYfgMg==
 -----END CERTIFICATE-----
 
-subject= /C=ZA/O=Thawte Consulting (Pty) Ltd./CN=Thawte Personal Freemail Issuing CA
-serial=0D
------BEGIN CERTIFICATE-----
-MIIDPzCCAqigAwIBAgIBDTANBgkqhkiG9w0BAQUFADCB0TELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD
-VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT
-ZXJ2aWNlcyBEaXZpc2lvbjEkMCIGA1UEAxMbVGhhd3RlIFBlcnNvbmFsIEZyZWVt
-YWlsIENBMSswKQYJKoZIhvcNAQkBFhxwZXJzb25hbC1mcmVlbWFpbEB0aGF3dGUu
-Y29tMB4XDTAzMDcxNzAwMDAwMFoXDTEzMDcxNjIzNTk1OVowYjELMAkGA1UEBhMC
-WkExJTAjBgNVBAoTHFRoYXd0ZSBDb25zdWx0aW5nIChQdHkpIEx0ZC4xLDAqBgNV
-BAMTI1RoYXd0ZSBQZXJzb25hbCBGcmVlbWFpbCBJc3N1aW5nIENBMIGfMA0GCSqG
-SIb3DQEBAQUAA4GNADCBiQKBgQDEpjxVc1X7TrnKmVoeaMB1BHCd3+n/ox7svc31
-W/Iadr1/DDph8r9RzgHU5VAKMNcCY1osiRVwjt3J8CuFWqo/cVbLrzwLB+fxH5E2
-JCoTzyvV84J3PQO+K/67GD4Hv0CAAmTXp6a7n2XRxSpUhQ9IBH+nttE8YQRAHmQZ
-cmC3+wIDAQABo4GUMIGRMBIGA1UdEwEB/wQIMAYBAf8CAQAwQwYDVR0fBDwwOjA4
-oDagNIYyaHR0cDovL2NybC50aGF3dGUuY29tL1RoYXd0ZVBlcnNvbmFsRnJlZW1h
-aWxDQS5jcmwwCwYDVR0PBAQDAgEGMCkGA1UdEQQiMCCkHjAcMRowGAYDVQQDExFQ
-cml2YXRlTGFiZWwyLTEzODANBgkqhkiG9w0BAQUFAAOBgQBIjNFQg+oLLswNo2as
-Zw9/r6y+whehQ5aUnX9MIbj4Nh+qLZ82L8D0HFAgk3A8/a3hYWLD2ToZfoSxmRsA
-xRoLgnSeJVCUYsfbJ3FXJY3dqZw5jowgT2Vfldr394fWxghOrvbqNOUQGls1TXfj
-ViF4gtwhGTXeJLHTHUb/XV9lTw==
------END CERTIFICATE-----
-
 subject= /O=Thawte/OU=Thawte Universal CA Root/CN=Thawte Universal CA Root
 serial=00
 -----BEGIN CERTIFICATE-----
@@ -35141,36 +34900,6 @@
 JWhyg2e9VrCNAb0q98xLvYeluocgTEIRQa0QFzuM
 -----END CERTIFICATE-----
 
-subject= /DC=com/DC=google/DC=corp/CN=Google Internal 802.1x Authority
-serial=171EDFA0000200000056
------BEGIN CERTIFICATE-----
-MIIEpzCCA4+gAwIBAgIKFx7foAACAAAAVjANBgkqhkiG9w0BAQUFADAkMSIwIAYD
-VQQDExlHb29nbGUgSW50ZXJuYWwgQXV0aG9yaXR5MB4XDTA4MDcyNDE4NTM0NFoX
-DTEzMDcyMzE4NTM0NFowbjETMBEGCgmSJomT8ixkARkWA2NvbTEWMBQGCgmSJomT
-8ixkARkWBmdvb2dsZTEUMBIGCgmSJomT8ixkARkWBGNvcnAxKTAnBgNVBAMTIEdv
-b2dsZSBJbnRlcm5hbCA4MDIuMXggQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUA
-A4GNADCBiQKBgQCqwd9g4RI7jbNQogBhIl+Gqusdp4fkFMXXJoWNKQs1Y984W+PU
-1pTeliaU8ZuYCwX+3cTyIy+mklW0P6Jo7c27gP3PRIt8pzhul74rHoj2wHMyOdrA
-AHnR0skxXSVnZSCWcJN2TOHo2qCRPjKaFJktIBwRWNif4V8k811OPKsnKQIDAQAB
-o4ICEzCCAg8wDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPVTn5K0HORfXQ0n2
-MK14DyKxAKIwCwYDVR0PBAQDAgGGMBAGCSsGAQQBgjcVAQQDAgEBMCMGCSsGAQQB
-gjcVAgQWBBSCW3biKynr5hPE0QRtLd3iGNxn2zAZBgkrBgEEAYI3FAIEDB4KAFMA
-dQBiAEMAQTAfBgNVHSMEGDAWgBT8y6gXnwrjdxvg8OKukF57AOYGNzCCAQIGA1Ud
-HwSB+jCB9zCB9KCB8aCB7oY8aHR0cDovL2NybC5jb3JwLmdvb2dsZS5jb20vR29v
-Z2xlJTIwSW50ZXJuYWwlMjBBdXRob3JpdHkuY3JshjtodHRwOi8vY3JsLmV4dC5n
-b29nbGUuY29tL0dvb2dsZSUyMEludGVybmFsJTIwQXV0aG9yaXR5LmNybIY4aHR0
-cDovL2NybC5jb3JwLmdvb2dsZS5jb20vR29vZ2xlJTIwSW50ZXJuYWwlMjBBdXRo
-b3JpdHmGN2h0dHA6Ly9jcmwuZXh0Lmdvb2dsZS5jb20vR29vZ2xlJTIwSW50ZXJu
-YWwlMjBBdXRob3JpdHkwVwYIKwYBBQUHAQEESzBJMEcGCCsGAQUFBzAChjtodHRw
-Oi8vY2EuY29ycC5nb29nbGUuY29tL0dvb2dsZSUyMEludGVybmFsJTIwQXV0aG9y
-aXR5LmNydDANBgkqhkiG9w0BAQUFAAOCAQEAGse/Rc1JG+/HrVh0MV0MnRSOA3cF
-EXvXew53cJZJhIegaBMVMVIxhxChreHkgexKK4nC2v4ZcMUKr3RRAB1oa1IW9WBG
-BgiivFLyTeHatiqQU+of6q+fAf21vFBalqs0s+LMuCzZMAvYPFcJo/IDED4CgoMv
-IZam6BMizwCP0CQFF40Qlz7pKnfS7lND89L35ayScV+Vq15B/YGN4on/UYPWfOII
-SeTuMFJtOXMhkyeTJ0IK+AzvbN+kIJfpBbJzzC0OIzTi4RZVbAienIvDy5rXmNhQ
-HZJ8wGfPnebpQndktqc6yX9LV000ghC9cAiPQEHT2ag10bbD2zNuDxV1Og==
------END CERTIFICATE-----
-
 subject= /C=BE/O=GlobalSign nv-sa/OU=Root CA/CN=GlobalSign Root CA
 serial=-3B44273F3500A95AEE2CA9699E66DDD0
 -----BEGIN CERTIFICATE-----
diff --git a/lib/protorpc/protorpc/messages.py b/lib/protorpc/protorpc/messages.py
index 7410726..103d60b 100644
--- a/lib/protorpc/protorpc/messages.py
+++ b/lib/protorpc/protorpc/messages.py
@@ -1495,6 +1495,23 @@
                                        repeated=repeated,
                                        variant=variant)
 
+  def __set__(self, message_instance, value):
+    """Set value on message.
+
+    Args:
+      message_instance: Message instance to set value on.
+      value: Value to set on message.
+    """
+    message_type = self.type
+    if isinstance(message_type, type) and issubclass(message_type, Message):
+      if self.repeated:
+        if value and isinstance(value, (list, tuple)):
+          value = [(message_type(**v) if isinstance(v, dict) else v)
+                   for v in value]
+      elif isinstance(value, dict):
+        value = message_type(**value)
+    super(MessageField, self).__set__(message_instance, value)
+
   @property
   def type(self):
     """Message type used for field."""
diff --git a/lib/protorpc/protorpc/protojson.py b/lib/protorpc/protorpc/protojson.py
index d3dd0c6..4dcb706 100644
--- a/lib/protorpc/protorpc/protojson.py
+++ b/lib/protorpc/protorpc/protojson.py
@@ -313,8 +313,9 @@
       except ValueError, err:
         raise messages.DecodeError(err)
 
-    elif isinstance(field, messages.MessageField):
-      return self.__decode_dictionary(field.message_type, value)
+    elif (isinstance(field, messages.MessageField) and
+          issubclass(field.type, messages.Message)):
+      return self.__decode_dictionary(field.type, value)
 
     elif (isinstance(field, messages.FloatField) and
           isinstance(value, (int, long, basestring))):
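+    # Illustrative sketch (assumed Outer/Inner message classes as above):
+    # with the stricter check, only MessageFields whose type is a real
+    # Message subclass are routed through __decode_dictionary, e.g.
+    #
+    #   msg = protojson.decode_message(Outer, '{"inner": {"name": "x"}}')
+    #   assert msg.inner.name == 'x'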
diff --git a/lib/python-gflags/__init__.py b/lib/python-gflags/__init__.py
new file mode 100644
index 0000000..77e4285
--- /dev/null
+++ b/lib/python-gflags/__init__.py
@@ -0,0 +1 @@
+from gflags import *
diff --git a/oauth2client/__init__.py b/oauth2client/__init__.py
new file mode 100644
index 0000000..13d949f
--- /dev/null
+++ b/oauth2client/__init__.py
@@ -0,0 +1,5 @@
+__version__ = "1.0"
+
+GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/auth'
+GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke'
+GOOGLE_TOKEN_URI = 'https://accounts.google.com/o/oauth2/token'
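+
+# Illustrative sketch (assumed import site): these constants serve as the
+# package defaults for Google's OAuth 2.0 endpoints, e.g.
+#
+#   from oauth2client import GOOGLE_AUTH_URI, GOOGLE_TOKEN_URI
+#   # pass auth_uri=GOOGLE_AUTH_URI, token_uri=GOOGLE_TOKEN_URI to a flow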
diff --git a/oauth2client/anyjson.py b/oauth2client/anyjson.py
new file mode 100644
index 0000000..ae21c33
--- /dev/null
+++ b/oauth2client/anyjson.py
@@ -0,0 +1,32 @@
+# Copyright (C) 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utility module to import a JSON module
+
+Hides all the messy details of exactly where
+we get a simplejson module from.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+
+try: # pragma: no cover
+  # Should work for Python 2.6 and higher.
+  import json as simplejson
+except ImportError: # pragma: no cover
+  try:
+    import simplejson
+  except ImportError:
+    # Try to import from Django; should work on App Engine.
+    from django.utils import simplejson
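+
+# Illustrative sketch: callers always import the resolved backend under the
+# name simplejson, whichever module was actually found, e.g.
+#
+#   from oauth2client.anyjson import simplejson
+#   token = simplejson.loads('{"access_token": "abc"}')['access_token']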
diff --git a/oauth2client/appengine.py b/oauth2client/appengine.py
new file mode 100644
index 0000000..570f0f5
--- /dev/null
+++ b/oauth2client/appengine.py
@@ -0,0 +1,902 @@
+# Copyright (C) 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for Google App Engine
+
+Utilities for making it easier to use OAuth 2.0 on Google App Engine.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import base64
+import cgi
+import httplib2
+import logging
+import os
+import pickle
+import time
+import urllib
+import urlparse
+
+from google.appengine.api import app_identity
+from google.appengine.api import memcache
+from google.appengine.api import users
+from google.appengine.ext import db
+from google.appengine.ext import webapp
+from google.appengine.ext.webapp.util import login_required
+from google.appengine.ext.webapp.util import run_wsgi_app
+from oauth2client import GOOGLE_AUTH_URI
+from oauth2client import GOOGLE_REVOKE_URI
+from oauth2client import GOOGLE_TOKEN_URI
+from oauth2client import clientsecrets
+from oauth2client import util
+from oauth2client import xsrfutil
+from oauth2client.anyjson import simplejson
+from oauth2client.client import AccessTokenRefreshError
+from oauth2client.client import AssertionCredentials
+from oauth2client.client import Credentials
+from oauth2client.client import Flow
+from oauth2client.client import OAuth2WebServerFlow
+from oauth2client.client import Storage
+
+# TODO(dhermes): Resolve import issue.
+# This is a temporary fix for a Google internal issue.
+try:
+  from google.appengine.ext import ndb
+except ImportError:
+  ndb = None
+
+try:
+  from urlparse import parse_qsl
+except ImportError:
+  from cgi import parse_qsl
+
+logger = logging.getLogger(__name__)
+
+OAUTH2CLIENT_NAMESPACE = 'oauth2client#ns'
+
+XSRF_MEMCACHE_ID = 'xsrf_secret_key'
+
+
+def _safe_html(s):
+  """Escape text to make it safe to display.
+
+  Args:
+    s: string, The text to escape.
+
+  Returns:
+    The escaped text as a string.
+  """
+  return cgi.escape(s, quote=1).replace("'", '&#39;')
+
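+# Illustrative example (assumed input): cgi.escape with quote=1 handles
+# <, >, & and double quotes, and the extra replace covers single quotes:
+#
+#   _safe_html('<b>"hi"</b>')  ==>  '&lt;b&gt;&quot;hi&quot;&lt;/b&gt;'
+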
+
+class InvalidClientSecretsError(Exception):
+  """The client_secrets.json file is malformed or missing required fields."""
+
+
+class InvalidXsrfTokenError(Exception):
+  """The XSRF token is invalid or expired."""
+
+
+class SiteXsrfSecretKey(db.Model):
+  """Storage for the sites XSRF secret key.
+
+  There will only be one instance stored of this model, the one used for the
+  site.
+  """
+  secret = db.StringProperty()
+
+if ndb is not None:
+  class SiteXsrfSecretKeyNDB(ndb.Model):
+    """NDB Model for storage for the sites XSRF secret key.
+
+    Since this model uses the same kind as SiteXsrfSecretKey, it can be used
+    interchangeably. This simply provides an NDB model for interacting with the
+    same data the DB model interacts with.
+
+    There should only be one instance stored of this model, the one used for the
+    site.
+    """
+    secret = ndb.StringProperty()
+
+    @classmethod
+    def _get_kind(cls):
+      """Return the kind name for this class."""
+      return 'SiteXsrfSecretKey'
+
+
+def _generate_new_xsrf_secret_key():
+  """Returns a random XSRF secret key.
+  """
+  return os.urandom(16).encode("hex")
+
+
+def xsrf_secret_key():
+  """Return the secret key for use for XSRF protection.
+
+  If the Site entity does not have a secret key, this method will also create
+  one and persist it.
+
+  Returns:
+    The secret key.
+  """
+  secret = memcache.get(XSRF_MEMCACHE_ID, namespace=OAUTH2CLIENT_NAMESPACE)
+  if not secret:
+    # Load the one and only instance of SiteXsrfSecretKey.
+    model = SiteXsrfSecretKey.get_or_insert(key_name='site')
+    if not model.secret:
+      model.secret = _generate_new_xsrf_secret_key()
+      model.put()
+    secret = model.secret
+    memcache.add(XSRF_MEMCACHE_ID, secret, namespace=OAUTH2CLIENT_NAMESPACE)
+
+  return str(secret)
+
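+# Illustrative sketch: reads go memcache-first, then datastore; the key is
+# generated once per application and cached thereafter, e.g.
+#
+#   key = xsrf_secret_key()   # 16 random bytes, hex-encoded: 32 chars
+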
+
+class AppAssertionCredentials(AssertionCredentials):
+  """Credentials object for App Engine Assertion Grants
+
+  This object will allow an App Engine application to identify itself to Google
+  and other OAuth 2.0 servers that can verify assertions. It can be used to
+  access data stored under an account assigned to the App Engine application
+  itself.
+
+  This credential does not require a flow to instantiate because it represents
+  a two-legged flow, and therefore has all of the required information to
+  generate and refresh its own access tokens.
+  """
+
+  @util.positional(2)
+  def __init__(self, scope, **kwargs):
+    """Constructor for AppAssertionCredentials
+
+    Args:
+      scope: string or iterable of strings, scope(s) of the credentials being
+        requested.
+    """
+    self.scope = util.scopes_to_string(scope)
+
+    # Assertion type is no longer used, but still in the parent class signature.
+    super(AppAssertionCredentials, self).__init__(None)
+
+  @classmethod
+  def from_json(cls, json):
+    data = simplejson.loads(json)
+    return AppAssertionCredentials(data['scope'])
+
+  def _refresh(self, http_request):
+    """Refreshes the access_token.
+
+    Since the underlying App Engine app_identity implementation does its own
+    caching, we can skip all the storage hoops and just do a refresh using the
+    API.
+
+    Args:
+      http_request: callable, a callable that matches the method signature of
+        httplib2.Http.request, used to make the refresh request.
+
+    Raises:
+      AccessTokenRefreshError: When the refresh fails.
+    """
+    try:
+      scopes = self.scope.split()
+      (token, _) = app_identity.get_access_token(scopes)
+    except app_identity.Error, e:
+      raise AccessTokenRefreshError(str(e))
+    self.access_token = token
+
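+# Illustrative sketch (assumed scope value): two-legged credentials acting as
+# the application's own service account, with no end-user flow required.
+#
+#   credentials = AppAssertionCredentials(
+#       scope='https://www.googleapis.com/auth/devstorage.read_only')
+#   http = credentials.authorize(httplib2.Http())
+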
+
+class FlowProperty(db.Property):
+  """App Engine datastore Property for Flow.
+
+  Utility property that allows easy storage and retrieval of an
+  oauth2client.Flow"""
+
+  # Tell what the user type is.
+  data_type = Flow
+
+  # For writing to datastore.
+  def get_value_for_datastore(self, model_instance):
+    flow = super(FlowProperty,
+                 self).get_value_for_datastore(model_instance)
+    return db.Blob(pickle.dumps(flow))
+
+  # For reading from datastore.
+  def make_value_from_datastore(self, value):
+    if value is None:
+      return None
+    return pickle.loads(value)
+
+  def validate(self, value):
+    if value is not None and not isinstance(value, Flow):
+      raise db.BadValueError('Property %s must be convertible '
+                             'to a FlowThreeLegged instance (%s)' %
+                             (self.name, value))
+    return super(FlowProperty, self).validate(value)
+
+  def empty(self, value):
+    return not value
+
+
+if ndb is not None:
+  class FlowNDBProperty(ndb.PickleProperty):
+    """App Engine NDB datastore Property for Flow.
+
+    Serves the same purpose as the DB FlowProperty, but for NDB models. Since
+    PickleProperty inherits from BlobProperty, the underlying representation of
+    the data in the datastore will be the same as in the DB case.
+
+    Utility property that allows easy storage and retrieval of an
+    oauth2client.Flow
+    """
+
+    def _validate(self, value):
+      """Validates a value as a proper Flow object.
+
+      Args:
+        value: A value to be set on the property.
+
+      Raises:
+        TypeError: if the value is not an instance of Flow.
+      """
+      logger.info('validate: Got type %s', type(value))
+      if value is not None and not isinstance(value, Flow):
+        raise TypeError('Property %s must be convertible to a flow '
+                        'instance; received: %s.' % (self._name, value))
+
+
+class CredentialsProperty(db.Property):
+  """App Engine datastore Property for Credentials.
+
+  Utility property that allows easy storage and retrieval of
+  oauth2client.Credentials
+  """
+
+  # Tell what the user type is.
+  data_type = Credentials
+
+  # For writing to datastore.
+  def get_value_for_datastore(self, model_instance):
+    logger.info("get: Got type " + str(type(model_instance)))
+    cred = super(CredentialsProperty,
+                 self).get_value_for_datastore(model_instance)
+    if cred is None:
+      cred = ''
+    else:
+      cred = cred.to_json()
+    return db.Blob(cred)
+
+  # For reading from datastore.
+  def make_value_from_datastore(self, value):
+    logger.info("make: Got type " + str(type(value)))
+    if value is None:
+      return None
+    if len(value) == 0:
+      return None
+    try:
+      credentials = Credentials.new_from_json(value)
+    except ValueError:
+      credentials = None
+    return credentials
+
+  def validate(self, value):
+    value = super(CredentialsProperty, self).validate(value)
+    logger.info("validate: Got type " + str(type(value)))
+    if value is not None and not isinstance(value, Credentials):
+      raise db.BadValueError('Property %s must be convertible '
+                             'to a Credentials instance (%s)' %
+                             (self.name, value))
+    return value
+
+
+if ndb is not None:
+  # TODO(dhermes): Turn this into a JsonProperty and overhaul the Credentials
+  #                and subclass mechanics to use new_from_dict, to_dict,
+  #                from_dict, etc.
+  class CredentialsNDBProperty(ndb.BlobProperty):
+    """App Engine NDB datastore Property for Credentials.
+
+    Serves the same purpose as the DB CredentialsProperty, but for NDB models.
+    Since CredentialsProperty stores data as a blob and this inherits from
+    BlobProperty, the data in the datastore will be the same as in the DB case.
+
+    Utility property that allows easy storage and retrieval of Credentials and
+    subclasses.
+    """
+    def _validate(self, value):
+      """Validates a value as a proper credentials object.
+
+      Args:
+        value: A value to be set on the property.
+
+      Raises:
+        TypeError: if the value is not an instance of Credentials.
+      """
+      logger.info('validate: Got type %s', type(value))
+      if value is not None and not isinstance(value, Credentials):
+        raise TypeError('Property %s must be convertible to a credentials '
+                        'instance; received: %s.' % (self._name, value))
+
+    def _to_base_type(self, value):
+      """Converts our validated value to a JSON serialized string.
+
+      Args:
+        value: A value to be set in the datastore.
+
+      Returns:
+        A JSON serialized version of the credential, else '' if value is None.
+      """
+      if value is None:
+        return ''
+      else:
+        return value.to_json()
+
+    def _from_base_type(self, value):
+      """Converts our stored JSON string back to the desired type.
+
+      Args:
+        value: A value from the datastore to be converted to the desired type.
+
+      Returns:
+        A deserialized Credentials (or subclass) object, else None if the
+            value can't be parsed.
+      """
+      if not value:
+        return None
+      try:
+        # Uses the from_json method of the implied class of value
+        credentials = Credentials.new_from_json(value)
+      except ValueError:
+        credentials = None
+      return credentials
+
+
+class StorageByKeyName(Storage):
+  """Store and retrieve a credential to and from the App Engine datastore.
+
+  This Storage helper presumes the Credentials have been stored as a
+  CredentialsProperty or CredentialsNDBProperty on a datastore model class, and
+  that entities are stored by key_name.
+  """
+
+  @util.positional(4)
+  def __init__(self, model, key_name, property_name, cache=None):
+    """Constructor for Storage.
+
+    Args:
+      model: db.Model or ndb.Model, model class
+      key_name: string, key name for the entity that has the credentials
+      property_name: string, name of the property that is a CredentialsProperty
+        or CredentialsNDBProperty.
+      cache: memcache, a write-through cache to put in front of the datastore.
+        If the model you are using is an NDB model, using a cache will be
+        redundant since the model uses an instance cache and memcache for you.
+    """
+    self._model = model
+    self._key_name = key_name
+    self._property_name = property_name
+    self._cache = cache
+
+  def _is_ndb(self):
+    """Determine whether the model of the instance is an NDB model.
+
+    Returns:
+      Boolean indicating whether or not the model is an NDB or DB model.
+    """
+    # issubclass will fail if one of the arguments is not a class; we only
+    # need to worry about new-style classes, since ndb and db models are
+    # new-style.
+    if isinstance(self._model, type):
+      if ndb is not None and issubclass(self._model, ndb.Model):
+        return True
+      elif issubclass(self._model, db.Model):
+        return False
+
+    raise TypeError('Model class not an NDB or DB model: %s.' % (self._model,))
+
+  def _get_entity(self):
+    """Retrieve entity from datastore.
+
+    Uses a different model method for db or ndb models.
+
+    Returns:
+      Instance of the model corresponding to the current storage object
+          and stored using the key name of the storage object.
+    """
+    if self._is_ndb():
+      return self._model.get_by_id(self._key_name)
+    else:
+      return self._model.get_by_key_name(self._key_name)
+
+  def _delete_entity(self):
+    """Delete entity from datastore.
+
+    Attempts to delete using the key_name stored on the object, whether or not
+    the given key is in the datastore.
+    """
+    if self._is_ndb():
+      ndb.Key(self._model, self._key_name).delete()
+    else:
+      entity_key = db.Key.from_path(self._model.kind(), self._key_name)
+      db.delete(entity_key)
+
+  def locked_get(self):
+    """Retrieve Credential from datastore.
+
+    Returns:
+      oauth2client.Credentials
+    """
+    if self._cache:
+      json = self._cache.get(self._key_name)
+      if json:
+        return Credentials.new_from_json(json)
+
+    credentials = None
+    entity = self._get_entity()
+    if entity is not None:
+      credentials = getattr(entity, self._property_name)
+      if credentials and hasattr(credentials, 'set_store'):
+        credentials.set_store(self)
+        if self._cache:
+          self._cache.set(self._key_name, credentials.to_json())
+
+    return credentials
+
+  def locked_put(self, credentials):
+    """Write a Credentials to the datastore.
+
+    Args:
+      credentials: Credentials, the credentials to store.
+    """
+    entity = self._model.get_or_insert(self._key_name)
+    setattr(entity, self._property_name, credentials)
+    entity.put()
+    if self._cache:
+      self._cache.set(self._key_name, credentials.to_json())
+
+  def locked_delete(self):
+    """Delete Credential from datastore."""
+
+    if self._cache:
+      self._cache.delete(self._key_name)
+
+    self._delete_entity()
+
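+# Illustrative sketch: pairing StorageByKeyName with the CredentialsModel
+# defined below, keyed by the signed-in user's id.
+#
+#   storage = StorageByKeyName(
+#       CredentialsModel, users.get_current_user().user_id(), 'credentials')
+#   credentials = storage.get()   # None if nothing is stored yet
+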
+
+class CredentialsModel(db.Model):
+  """Storage for OAuth 2.0 Credentials
+
+  Storage of the model is keyed by the user.user_id().
+  """
+  credentials = CredentialsProperty()
+
+
+if ndb is not None:
+  class CredentialsNDBModel(ndb.Model):
+    """NDB Model for storage of OAuth 2.0 Credentials
+
+    Since this model uses the same kind as CredentialsModel and has a property
+    which can serialize and deserialize Credentials correctly, it can be used
+    interchangeably with a CredentialsModel to access, insert and delete the
+    same entities. This simply provides an NDB model for interacting with the
+    same data the DB model interacts with.
+
+    Storage of the model is keyed by the user.user_id().
+    """
+    credentials = CredentialsNDBProperty()
+
+    @classmethod
+    def _get_kind(cls):
+      """Return the kind name for this class."""
+      return 'CredentialsModel'
+
+
+def _build_state_value(request_handler, user):
+  """Composes the value for the 'state' parameter.
+
+  Packs the current request URI and an XSRF token into an opaque string that
+  can be passed to the authentication server via the 'state' parameter.
+
+  Args:
+    request_handler: webapp.RequestHandler, The request.
+    user: google.appengine.api.users.User, The current user.
+
+  Returns:
+    The state value as a string.
+  """
+  uri = request_handler.request.url
+  token = xsrfutil.generate_token(xsrf_secret_key(), user.user_id(),
+                                  action_id=str(uri))
+  return uri + ':' + token
+
+
+def _parse_state_value(state, user):
+  """Parse the value of the 'state' parameter.
+
+  Parses the value and validates the XSRF token in the state parameter.
+
+  Args:
+    state: string, The value of the state parameter.
+    user: google.appengine.api.users.User, The current user.
+
+  Raises:
+    InvalidXsrfTokenError: if the XSRF token is invalid.
+
+  Returns:
+    The redirect URI.
+  """
+  uri, token = state.rsplit(':', 1)
+  if not xsrfutil.validate_token(xsrf_secret_key(), token, user.user_id(),
+                                 action_id=uri):
+    raise InvalidXsrfTokenError()
+
+  return uri
+
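+# Illustrative sketch: the two helpers above round-trip, so the OAuth
+# callback can validate the XSRF token and recover the original request URI:
+#
+#   state = _build_state_value(request_handler, user)
+#   redirect_uri = _parse_state_value(state, user)   # original URI
+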
+
+class OAuth2Decorator(object):
+  """Utility for making OAuth 2.0 easier.
+
+  Instantiate and then use with oauth_required or oauth_aware
+  as decorators on webapp.RequestHandler methods.
+
+  Example:
+
+    decorator = OAuth2Decorator(
+        client_id='837...ent.com',
+        client_secret='Qh...wwI',
+        scope='https://www.googleapis.com/auth/plus')
+
+
+    class MainHandler(webapp.RequestHandler):
+
+      @decorator.oauth_required
+      def get(self):
+        http = decorator.http()
+        # http is authorized with the user's Credentials and can be used
+        # in API calls
+
+  """
+
+  @util.positional(4)
+  def __init__(self, client_id, client_secret, scope,
+               auth_uri=GOOGLE_AUTH_URI,
+               token_uri=GOOGLE_TOKEN_URI,
+               revoke_uri=GOOGLE_REVOKE_URI,
+               user_agent=None,
+               message=None,
+               callback_path='/oauth2callback',
+               token_response_param=None,
+               **kwargs):
+    """Constructor for OAuth2Decorator.
+
+    Args:
+      client_id: string, client identifier.
+      client_secret: string, client secret.
+      scope: string or iterable of strings, scope(s) of the credentials being
+        requested.
+      auth_uri: string, URI for authorization endpoint. For convenience
+        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
+      token_uri: string, URI for token endpoint. For convenience
+        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
+      revoke_uri: string, URI for revoke endpoint. For convenience
+        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
+      user_agent: string, User agent of your application, default to None.
+      message: Message to display if there are problems with the OAuth 2.0
+        configuration. The message may contain HTML and will be presented on the
+        web interface for any method that uses the decorator.
+      callback_path: string, The absolute path to use as the callback URI. Note
+        that this must match up with the URI given when registering the
+        application in the APIs Console.
+      token_response_param: string. If provided, the full JSON response
+        to the access token request will be encoded and included in this query
+        parameter in the callback URI. This is useful with providers (e.g.
+        wordpress.com) that include extra fields that the client may want.
+      **kwargs: dict, Keyword arguments are passed along as kwargs to the
+        OAuth2WebServerFlow constructor.
+    """
+    self.flow = None
+    self.credentials = None
+    self._client_id = client_id
+    self._client_secret = client_secret
+    self._scope = util.scopes_to_string(scope)
+    self._auth_uri = auth_uri
+    self._token_uri = token_uri
+    self._revoke_uri = revoke_uri
+    self._user_agent = user_agent
+    self._kwargs = kwargs
+    self._message = message
+    self._in_error = False
+    self._callback_path = callback_path
+    self._token_response_param = token_response_param
+
+  def _display_error_message(self, request_handler):
+    request_handler.response.out.write('<html><body>')
+    request_handler.response.out.write(_safe_html(self._message))
+    request_handler.response.out.write('</body></html>')
+
+  def oauth_required(self, method):
+    """Decorator that starts the OAuth 2.0 dance.
+
+    Starts the OAuth dance for the logged in user if they haven't already
+    granted access for this application.
+
+    Args:
+      method: callable, to be decorated method of a webapp.RequestHandler
+        instance.
+    """
+
+    def check_oauth(request_handler, *args, **kwargs):
+      if self._in_error:
+        self._display_error_message(request_handler)
+        return
+
+      user = users.get_current_user()
+      # Don't use @login_decorator as this could be used in a POST request.
+      if not user:
+        request_handler.redirect(users.create_login_url(
+            request_handler.request.uri))
+        return
+
+      self._create_flow(request_handler)
+
+      # Store the request URI in 'state' so we can use it later
+      self.flow.params['state'] = _build_state_value(request_handler, user)
+      self.credentials = StorageByKeyName(
+          CredentialsModel, user.user_id(), 'credentials').get()
+
+      if not self.has_credentials():
+        return request_handler.redirect(self.authorize_url())
+      try:
+        return method(request_handler, *args, **kwargs)
+      except AccessTokenRefreshError:
+        return request_handler.redirect(self.authorize_url())
+
+    return check_oauth
+
+  def _create_flow(self, request_handler):
+    """Create the Flow object.
+
+    The Flow is calculated lazily since we don't know where this app is
+    running until it receives a request, at which point redirect_uri can be
+    calculated and then the Flow object can be constructed.
+
+    Args:
+      request_handler: webapp.RequestHandler, the request handler.
+    """
+    if self.flow is None:
+      redirect_uri = request_handler.request.relative_url(
+          self._callback_path) # Usually /oauth2callback
+      self.flow = OAuth2WebServerFlow(self._client_id, self._client_secret,
+                                      self._scope, redirect_uri=redirect_uri,
+                                      user_agent=self._user_agent,
+                                      auth_uri=self._auth_uri,
+                                      token_uri=self._token_uri,
+                                      revoke_uri=self._revoke_uri,
+                                      **self._kwargs)
+
+  def oauth_aware(self, method):
+    """Decorator that sets up for OAuth 2.0 dance, but doesn't do it.
+
+    Does all the setup for the OAuth dance, but doesn't initiate it.
+    This decorator is useful if you want to create a page that knows
+    whether or not the user has granted access to this application.
+    From within a method decorated with @oauth_aware the has_credentials()
+    and authorize_url() methods can be called.
+
+    Args:
+      method: callable, to be decorated method of a webapp.RequestHandler
+        instance.
+    """
+
+    def setup_oauth(request_handler, *args, **kwargs):
+      if self._in_error:
+        self._display_error_message(request_handler)
+        return
+
+      user = users.get_current_user()
+      # Don't use @login_decorator as this could be used in a POST request.
+      if not user:
+        request_handler.redirect(users.create_login_url(
+            request_handler.request.uri))
+        return
+
+      self._create_flow(request_handler)
+
+      self.flow.params['state'] = _build_state_value(request_handler, user)
+      self.credentials = StorageByKeyName(
+          CredentialsModel, user.user_id(), 'credentials').get()
+      return method(request_handler, *args, **kwargs)
+    return setup_oauth
+
+  def has_credentials(self):
+    """True if for the logged in user there are valid access Credentials.
+
+    Must only be called from with a webapp.RequestHandler subclassed method
+    that had been decorated with either @oauth_required or @oauth_aware.
+    """
+    return self.credentials is not None and not self.credentials.invalid
+
+  def authorize_url(self):
+    """Returns the URL to start the OAuth dance.
+
+    Must only be called from within a webapp.RequestHandler subclassed method
+    that has been decorated with either @oauth_required or @oauth_aware.
+    """
+    url = self.flow.step1_get_authorize_url()
+    return str(url)
+
+  def http(self):
+    """Returns an authorized http instance.
+
+    Must only be called from within an @oauth_required decorated method, or
+    from within an @oauth_aware decorated method where has_credentials()
+    returns True.
+    """
+    return self.credentials.authorize(httplib2.Http())
+
+  @property
+  def callback_path(self):
+    """The absolute path where the callback will occur.
+
+    Note this is the absolute path, not the absolute URI, that will be
+    calculated by the decorator at runtime. See callback_handler() for how this
+    should be used.
+
+    Returns:
+      The callback path as a string.
+    """
+    return self._callback_path
+
+  def callback_handler(self):
+    """RequestHandler for the OAuth 2.0 redirect callback.
+
+    Usage:
+       app = webapp.WSGIApplication([
+         ('/index', MyIndexHandler),
+         ...,
+         (decorator.callback_path, decorator.callback_handler())
+       ])
+
+    Returns:
+      A webapp.RequestHandler that handles the redirect back from the
+      server during the OAuth 2.0 dance.
+    """
+    decorator = self
+
+    class OAuth2Handler(webapp.RequestHandler):
+      """Handler for the redirect_uri of the OAuth 2.0 dance."""
+
+      @login_required
+      def get(self):
+        error = self.request.get('error')
+        if error:
+          errormsg = self.request.get('error_description', error)
+          self.response.out.write(
+              'The authorization request failed: %s' % _safe_html(errormsg))
+        else:
+          user = users.get_current_user()
+          decorator._create_flow(self)
+          credentials = decorator.flow.step2_exchange(self.request.params)
+          StorageByKeyName(
+              CredentialsModel, user.user_id(), 'credentials').put(credentials)
+          redirect_uri = _parse_state_value(str(self.request.get('state')),
+                                            user)
+
+          if decorator._token_response_param and credentials.token_response:
+            resp_json = simplejson.dumps(credentials.token_response)
+            redirect_uri = util._add_query_parameter(
+              redirect_uri, decorator._token_response_param, resp_json)
+
+          self.redirect(redirect_uri)
+
+    return OAuth2Handler
+
+  def callback_application(self):
+    """WSGI application for handling the OAuth 2.0 redirect callback.
+
+    If you need finer grained control use `callback_handler` which returns just
+    the webapp.RequestHandler.
+
+    Returns:
+      A webapp.WSGIApplication that handles the redirect back from the
+      server during the OAuth 2.0 dance.
+    """
+    return webapp.WSGIApplication([
+        (self.callback_path, self.callback_handler())
+        ])
+
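+
+# Illustrative sketch (not part of the SDK): minimal WSGI wiring for the
+# decorator; the handler and routes are hypothetical.
+def _example_application(decorator):
+  class MainHandler(webapp.RequestHandler):
+
+    @decorator.oauth_required
+    def get(self):
+      self.response.out.write('Authorized.')
+
+  return webapp.WSGIApplication([
+      ('/', MainHandler),
+      (decorator.callback_path, decorator.callback_handler()),
+      ])
+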
+
+class OAuth2DecoratorFromClientSecrets(OAuth2Decorator):
+  """An OAuth2Decorator that builds from a clientsecrets file.
+
+  Uses a clientsecrets file as the source for all the information when
+  constructing an OAuth2Decorator.
+
+  Example:
+
+    decorator = OAuth2DecoratorFromClientSecrets(
+      os.path.join(os.path.dirname(__file__), 'client_secrets.json'),
+      scope='https://www.googleapis.com/auth/plus')
+
+
+    class MainHandler(webapp.RequestHandler):
+
+      @decorator.oauth_required
+      def get(self):
+        http = decorator.http()
+        # http is authorized with the user's Credentials and can be used
+        # in API calls
+  """
+
+  @util.positional(3)
+  def __init__(self, filename, scope, message=None, cache=None):
+    """Constructor
+
+    Args:
+      filename: string, File name of client secrets.
+      scope: string or iterable of strings, scope(s) of the credentials being
+        requested.
+      message: string, A friendly string to display to the user if the
+        clientsecrets file is missing or invalid. The message may contain HTML
+        and will be presented on the web interface for any method that uses the
+        decorator.
+      cache: An optional cache service client that implements get() and set()
+        methods. See clientsecrets.loadfile() for details.
+    """
+    client_type, client_info = clientsecrets.loadfile(filename, cache=cache)
+    if client_type not in [
+        clientsecrets.TYPE_WEB, clientsecrets.TYPE_INSTALLED]:
+      raise InvalidClientSecretsError(
+          'OAuth2Decorator doesn\'t support this OAuth 2.0 flow.')
+    constructor_kwargs = {
+      'auth_uri': client_info['auth_uri'],
+      'token_uri': client_info['token_uri'],
+      'message': message,
+    }
+    revoke_uri = client_info.get('revoke_uri')
+    if revoke_uri is not None:
+      constructor_kwargs['revoke_uri'] = revoke_uri
+    super(OAuth2DecoratorFromClientSecrets, self).__init__(
+        client_info['client_id'], client_info['client_secret'],
+        scope, **constructor_kwargs)
+    if message is not None:
+      self._message = message
+    else:
+      self._message = 'Please configure your application for OAuth 2.0.'
+
+
+@util.positional(2)
+def oauth2decorator_from_clientsecrets(filename, scope,
+                                       message=None, cache=None):
+  """Creates an OAuth2Decorator populated from a clientsecrets file.
+
+  Args:
+    filename: string, File name of client secrets.
+    scope: string or list of strings, scope(s) of the credentials being
+      requested.
+    message: string, A friendly string to display to the user if the
+      clientsecrets file is missing or invalid. The message may contain HTML and
+      will be presented on the web interface for any method that uses the
+      decorator.
+    cache: An optional cache service client that implements get() and set()
+      methods. See clientsecrets.loadfile() for details.
+
+  Returns:
+    An OAuth2Decorator.
+  """
+  return OAuth2DecoratorFromClientSecrets(filename, scope,
+                                          message=message, cache=cache)
diff --git a/oauth2client/client.py b/oauth2client/client.py
new file mode 100644
index 0000000..6b580a0
--- /dev/null
+++ b/oauth2client/client.py
@@ -0,0 +1,1364 @@
+# Copyright (C) 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""An OAuth 2.0 client.
+
+Tools for interacting with OAuth 2.0 protected resources.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import base64
+import clientsecrets
+import copy
+import datetime
+import httplib2
+import logging
+import os
+import sys
+import time
+import urllib
+import urlparse
+
+from oauth2client import GOOGLE_AUTH_URI
+from oauth2client import GOOGLE_REVOKE_URI
+from oauth2client import GOOGLE_TOKEN_URI
+from oauth2client import util
+from oauth2client.anyjson import simplejson
+
+HAS_OPENSSL = False
+HAS_CRYPTO = False
+try:
+  from oauth2client import crypt
+  HAS_CRYPTO = True
+  if crypt.OpenSSLVerifier is not None:
+    HAS_OPENSSL = True
+except ImportError:
+  pass
+
+try:
+  from urlparse import parse_qsl
+except ImportError:
+  from cgi import parse_qsl
+
+logger = logging.getLogger(__name__)
+
+# Expiry is stored in RFC3339 UTC format
+EXPIRY_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
+
+# Which certs to use to validate id_tokens received.
+ID_TOKEN_VERIFICATON_CERTS = 'https://www.googleapis.com/oauth2/v1/certs'
+
+# Constant to use for the out of band OAuth 2.0 flow.
+OOB_CALLBACK_URN = 'urn:ietf:wg:oauth:2.0:oob'
+
+# Google Data client libraries may need to set this to [401, 403].
+REFRESH_STATUS_CODES = [401]
+
+
+class Error(Exception):
+  """Base error for this module."""
+
+
+class FlowExchangeError(Error):
+  """Error trying to exchange an authorization grant for an access token."""
+
+
+class AccessTokenRefreshError(Error):
+  """Error trying to refresh an expired access token."""
+
+
+class TokenRevokeError(Error):
+  """Error trying to revoke a token."""
+
+
+class UnknownClientSecretsFlowError(Error):
+  """The client secrets file called for an unknown type of OAuth 2.0 flow. """
+
+
+class AccessTokenCredentialsError(Error):
+  """Having only the access_token means no refresh is possible."""
+
+
+class VerifyJwtTokenError(Error):
+  """Could on retrieve certificates for validation."""
+
+
+class NonAsciiHeaderError(Error):
+  """Header names and values must be ASCII strings."""
+
+
+def _abstract():
+  raise NotImplementedError('You need to override this function')
+
+
+class MemoryCache(object):
+  """httplib2 Cache implementation which only caches locally."""
+
+  def __init__(self):
+    self.cache = {}
+
+  def get(self, key):
+    return self.cache.get(key)
+
+  def set(self, key, value):
+    self.cache[key] = value
+
+  def delete(self, key):
+    self.cache.pop(key, None)
+
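+
+# Illustrative sketch (not part of the SDK): MemoryCache provides the
+# get/set/delete interface httplib2 accepts as a cache, the same pattern
+# used for _cached_http in verify_id_token() below.
+def _example_cached_http():
+  return httplib2.Http(MemoryCache())
+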
+
+class Credentials(object):
+  """Base class for all Credentials objects.
+
+  Subclasses must define an authorize() method that applies the credentials to
+  an HTTP transport.
+
+  Subclasses must also specify a classmethod named 'from_json' that takes a JSON
+  string as input and returns an instantiated Credentials object.
+  """
+
+  NON_SERIALIZED_MEMBERS = ['store']
+
+  def authorize(self, http):
+    """Take an httplib2.Http instance (or equivalent) and authorizes it.
+
+    Authorizes it for the set of credentials, usually by replacing
+    http.request() with a method that adds in the appropriate headers and then
+    delegates to the original Http.request() method.
+
+    Args:
+      http: httplib2.Http, an http object to be used to make the refresh
+        request.
+    """
+    _abstract()
+
+  def refresh(self, http):
+    """Forces a refresh of the access_token.
+
+    Args:
+      http: httplib2.Http, an http object to be used to make the refresh
+        request.
+    """
+    _abstract()
+
+  def revoke(self, http):
+    """Revokes a refresh_token and makes the credentials void.
+
+    Args:
+      http: httplib2.Http, an http object to be used to make the revoke
+        request.
+    """
+    _abstract()
+
+  def apply(self, headers):
+    """Add the authorization to the headers.
+
+    Args:
+      headers: dict, the headers to add the Authorization header to.
+    """
+    _abstract()
+
+  def _to_json(self, strip):
+    """Utility function that creates JSON repr. of a Credentials object.
+
+    Args:
+      strip: array, An array of names of members to not include in the JSON.
+
+    Returns:
+       string, a JSON representation of this instance, suitable to pass to
+       from_json().
+    """
+    t = type(self)
+    d = copy.copy(self.__dict__)
+    for member in strip:
+      if member in d:
+        del d[member]
+    if 'token_expiry' in d and isinstance(d['token_expiry'], datetime.datetime):
+      d['token_expiry'] = d['token_expiry'].strftime(EXPIRY_FORMAT)
+    # Add in information we will need later to reconstitute this instance.
+    d['_class'] = t.__name__
+    d['_module'] = t.__module__
+    return simplejson.dumps(d)
+
+  def to_json(self):
+    """Creating a JSON representation of an instance of Credentials.
+
+    Returns:
+       string, a JSON representation of this instance, suitable to pass to
+       from_json().
+    """
+    return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)
+
+  @classmethod
+  def new_from_json(cls, s):
+    """Utility class method to instantiate a Credentials subclass from a JSON
+    representation produced by to_json().
+
+    Args:
+      s: string, JSON from to_json().
+
+    Returns:
+      An instance of the subclass of Credentials that was serialized with
+      to_json().
+    """
+    data = simplejson.loads(s)
+    # Find and call the right classmethod from_json() to restore the object.
+    module = data['_module']
+    try:
+      m = __import__(module)
+    except ImportError:
+      # In case there's an object from the old package structure, update it
+      module = module.replace('.apiclient', '')
+      m = __import__(module)
+
+    m = __import__(module, fromlist=module.split('.')[:-1])
+    kls = getattr(m, data['_class'])
+    from_json = getattr(kls, 'from_json')
+    return from_json(s)
+
+  @classmethod
+  def from_json(cls, s):
+    """Instantiate a Credentials object from a JSON description of it.
+
+    The JSON should have been produced by calling .to_json() on the object.
+
+    Args:
+      s: string, JSON from to_json().
+
+    Returns:
+      An instance of a Credentials subclass.
+    """
+    return Credentials()
+
+
+class Flow(object):
+  """Base class for all Flow objects."""
+  pass
+
+
+class Storage(object):
+  """Base class for all Storage objects.
+
+  Store and retrieve a single credential. This class supports locking
+  such that multiple processes and threads can operate on a single
+  store.
+  """
+
+  def acquire_lock(self):
+    """Acquires any lock necessary to access this Storage.
+
+    This lock is not reentrant.
+    """
+    pass
+
+  def release_lock(self):
+    """Release the Storage lock.
+
+    Trying to release a lock that isn't held will result in a
+    RuntimeError.
+    """
+    pass
+
+  def locked_get(self):
+    """Retrieve credential.
+
+    The Storage lock must be held when this is called.
+
+    Returns:
+      oauth2client.client.Credentials
+    """
+    _abstract()
+
+  def locked_put(self, credentials):
+    """Write a credential.
+
+    The Storage lock must be held when this is called.
+
+    Args:
+      credentials: Credentials, the credentials to store.
+    """
+    _abstract()
+
+  def locked_delete(self):
+    """Delete a credential.
+
+    The Storage lock must be held when this is called.
+    """
+    _abstract()
+
+  def get(self):
+    """Retrieve credential.
+
+    The Storage lock must *not* be held when this is called.
+
+    Returns:
+      oauth2client.client.Credentials
+    """
+    self.acquire_lock()
+    try:
+      return self.locked_get()
+    finally:
+      self.release_lock()
+
+  def put(self, credentials):
+    """Write a credential.
+
+    The Storage lock must *not* be held when this is called.
+
+    Args:
+      credentials: Credentials, the credentials to store.
+    """
+    self.acquire_lock()
+    try:
+      self.locked_put(credentials)
+    finally:
+      self.release_lock()
+
+  def delete(self):
+    """Delete credential.
+
+    Frees any resources associated with storing the credential.
+    The Storage lock must *not* be held when this is called.
+
+    Returns:
+      None
+    """
+    self.acquire_lock()
+    try:
+      return self.locked_delete()
+    finally:
+      self.release_lock()
+
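+
+import threading
+
+
+# Illustrative sketch (not part of the SDK): a minimal thread-safe Storage
+# that keeps one credential in memory. Real subclasses persist elsewhere;
+# for example, appengine.StorageByKeyName stores in the App Engine datastore.
+class _ExampleInMemoryStorage(Storage):
+
+  def __init__(self):
+    self._lock = threading.Lock()
+    self._credentials = None
+
+  def acquire_lock(self):
+    self._lock.acquire()
+
+  def release_lock(self):
+    self._lock.release()
+
+  def locked_get(self):
+    return self._credentials
+
+  def locked_put(self, credentials):
+    self._credentials = credentials
+
+  def locked_delete(self):
+    self._credentials = None
+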
+
+def clean_headers(headers):
+  """Forces header keys and values to be strings, i.e not unicode.
+
+  The httplib module just concats the header keys and values in a way that may
+  make the message header a unicode string, which, if it then tries to
+  contatenate to a binary request body may result in a unicode decode error.
+
+  Args:
+    headers: dict, A dictionary of headers.
+
+  Returns:
+    The same dictionary but with all the keys converted to strings.
+  """
+  clean = {}
+  try:
+    for k, v in headers.iteritems():
+      clean[str(k)] = str(v)
+  except UnicodeEncodeError:
+    raise NonAsciiHeaderError(k + ': ' + v)
+  return clean
+
+
+def _update_query_params(uri, params):
+  """Updates a URI with new query parameters.
+
+  Args:
+    uri: string, A valid URI, with potential existing query parameters.
+    params: dict, A dictionary of query parameters.
+
+  Returns:
+    The same URI but with the new query parameters added.
+  """
+  parts = list(urlparse.urlparse(uri))
+  query_params = dict(parse_qsl(parts[4])) # 4 is the index of the query part
+  query_params.update(params)
+  parts[4] = urllib.urlencode(query_params)
+  return urlparse.urlunparse(parts)
+
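+
+# Illustrative sketch (not part of the SDK): existing query parameters are
+# preserved and new ones merged in; urllib.urlencode may emit them in either
+# order.
+def _example_update_query_params():
+  return _update_query_params('https://example.com/path?a=1', {'b': '2'})
+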
+
+class OAuth2Credentials(Credentials):
+  """Credentials object for OAuth 2.0.
+
+  Credentials can be applied to an httplib2.Http object using the authorize()
+  method, which then adds the OAuth 2.0 access token to each request.
+
+  OAuth2Credentials objects may be safely pickled and unpickled.
+  """
+
+  @util.positional(8)
+  def __init__(self, access_token, client_id, client_secret, refresh_token,
+               token_expiry, token_uri, user_agent, revoke_uri=None,
+               id_token=None, token_response=None):
+    """Create an instance of OAuth2Credentials.
+
+    This constructor is not usually called by the user; instead
+    OAuth2Credentials objects are instantiated by the OAuth2WebServerFlow.
+
+    Args:
+      access_token: string, access token.
+      client_id: string, client identifier.
+      client_secret: string, client secret.
+      refresh_token: string, refresh token.
+      token_expiry: datetime, when the access_token expires.
+      token_uri: string, URI of token endpoint.
+      user_agent: string, The HTTP User-Agent to provide for this application.
+      revoke_uri: string, URI for revoke endpoint. Defaults to None; a token
+        can't be revoked if this is None.
+      id_token: object, The identity of the resource owner.
+      token_response: dict, the decoded response to the token request. None
+        if a token hasn't been requested yet. Stored because some providers
+        (e.g. wordpress.com) include extra fields that clients may want.
+
+    Notes:
+      store: callable, A callable that when passed a Credential
+        will store the credential back to where it came from.
+        This is needed to store the latest access_token if it
+        has expired and been refreshed.
+    """
+    self.access_token = access_token
+    self.client_id = client_id
+    self.client_secret = client_secret
+    self.refresh_token = refresh_token
+    self.store = None
+    self.token_expiry = token_expiry
+    self.token_uri = token_uri
+    self.user_agent = user_agent
+    self.revoke_uri = revoke_uri
+    self.id_token = id_token
+    self.token_response = token_response
+
+    # True if the credentials have been revoked or expired and can't be
+    # refreshed.
+    self.invalid = False
+
+  def authorize(self, http):
+    """Authorize an httplib2.Http instance with these credentials.
+
+    The modified http.request method will add authentication headers to each
+    request and will refresh access_tokens when a 401 is received on a
+    request. In addition the http.request method has a credentials property,
+    http.request.credentials, which is the Credentials object that authorized
+    it.
+
+    Args:
+       http: An instance of httplib2.Http
+         or something that acts like it.
+
+    Returns:
+       A modified instance of http that was passed in.
+
+    Example:
+
+      h = httplib2.Http()
+      h = credentials.authorize(h)
+
+    You can't create a new OAuth subclass of httplib2.Authentication
+    because it never gets passed the absolute URI, which is needed for
+    signing. So instead we have to overload 'request' with a closure
+    that adds in the Authorization header and then calls the original
+    version of 'request()'.
+    """
+    request_orig = http.request
+
+    # The closure that will replace 'httplib2.Http.request'.
+    @util.positional(1)
+    def new_request(uri, method='GET', body=None, headers=None,
+                    redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+                    connection_type=None):
+      if not self.access_token:
+        logger.info('Attempting refresh to obtain initial access_token')
+        self._refresh(request_orig)
+
+      # Modify the request headers to add the appropriate
+      # Authorization header.
+      if headers is None:
+        headers = {}
+      self.apply(headers)
+
+      if self.user_agent is not None:
+        if 'user-agent' in headers:
+          headers['user-agent'] = self.user_agent + ' ' + headers['user-agent']
+        else:
+          headers['user-agent'] = self.user_agent
+
+      resp, content = request_orig(uri, method, body, clean_headers(headers),
+                                   redirections, connection_type)
+
+      if resp.status in REFRESH_STATUS_CODES:
+        logger.info('Refreshing due to a %s' % str(resp.status))
+        self._refresh(request_orig)
+        self.apply(headers)
+        return request_orig(uri, method, body, clean_headers(headers),
+                            redirections, connection_type)
+      else:
+        return (resp, content)
+
+    # Replace the request method with our own closure.
+    http.request = new_request
+
+    # Set credentials as a property of the request method.
+    setattr(http.request, 'credentials', self)
+
+    return http
+
+  def refresh(self, http):
+    """Forces a refresh of the access_token.
+
+    Args:
+      http: httplib2.Http, an http object to be used to make the refresh
+        request.
+    """
+    self._refresh(http.request)
+
+  def revoke(self, http):
+    """Revokes a refresh_token and makes the credentials void.
+
+    Args:
+      http: httplib2.Http, an http object to be used to make the revoke
+        request.
+    """
+    self._revoke(http.request)
+
+  def apply(self, headers):
+    """Add the authorization to the headers.
+
+    Args:
+      headers: dict, the headers to add the Authorization header to.
+    """
+    headers['Authorization'] = 'Bearer ' + self.access_token
+
+  def to_json(self):
+    return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)
+
+  @classmethod
+  def from_json(cls, s):
+    """Instantiate a Credentials object from a JSON description of it. The JSON
+    should have been produced by calling .to_json() on the object.
+
+    Args:
+      s: string, JSON from to_json().
+
+    Returns:
+      An instance of a Credentials subclass.
+    """
+    data = simplejson.loads(s)
+    if 'token_expiry' in data and not isinstance(data['token_expiry'],
+        datetime.datetime):
+      try:
+        data['token_expiry'] = datetime.datetime.strptime(
+            data['token_expiry'], EXPIRY_FORMAT)
+      except:
+        data['token_expiry'] = None
+    retval = cls(
+        data['access_token'],
+        data['client_id'],
+        data['client_secret'],
+        data['refresh_token'],
+        data['token_expiry'],
+        data['token_uri'],
+        data['user_agent'],
+        revoke_uri=data.get('revoke_uri', None),
+        id_token=data.get('id_token', None),
+        token_response=data.get('token_response', None))
+    retval.invalid = data['invalid']
+    return retval
+
+  @property
+  def access_token_expired(self):
+    """True if the credential is expired or invalid.
+
+    If the token_expiry isn't set, we assume the token doesn't expire.
+    """
+    if self.invalid:
+      return True
+
+    if not self.token_expiry:
+      return False
+
+    now = datetime.datetime.utcnow()
+    if now >= self.token_expiry:
+      logger.info('access_token is expired. Now: %s, token_expiry: %s',
+                  now, self.token_expiry)
+      return True
+    return False
+
+  def set_store(self, store):
+    """Set the Storage for the credential.
+
+    Args:
+      store: Storage, an implementation of the Storage interface.
+        This is needed to store the latest access_token if it
+        has expired and been refreshed. This implementation uses
+        locking to check for updates before updating the
+        access_token.
+    """
+    self.store = store
+
+  def _updateFromCredential(self, other):
+    """Update this Credential from another instance."""
+    self.__dict__.update(other.__getstate__())
+
+  def __getstate__(self):
+    """Trim the state down to something that can be pickled."""
+    d = copy.copy(self.__dict__)
+    del d['store']
+    return d
+
+  def __setstate__(self, state):
+    """Reconstitute the state of the object from being pickled."""
+    self.__dict__.update(state)
+    self.store = None
+
+  def _generate_refresh_request_body(self):
+    """Generate the body that will be used in the refresh request."""
+    body = urllib.urlencode({
+        'grant_type': 'refresh_token',
+        'client_id': self.client_id,
+        'client_secret': self.client_secret,
+        'refresh_token': self.refresh_token,
+        })
+    return body
+
+  def _generate_refresh_request_headers(self):
+    """Generate the headers that will be used in the refresh request."""
+    headers = {
+        'content-type': 'application/x-www-form-urlencoded',
+    }
+
+    if self.user_agent is not None:
+      headers['user-agent'] = self.user_agent
+
+    return headers
+
+  def _refresh(self, http_request):
+    """Refreshes the access_token.
+
+    This method first checks by reading the Storage object if available.
+    If a refresh is still needed, it holds the Storage lock until the
+    refresh is completed.
+
+    Args:
+      http_request: callable, a callable that matches the method signature of
+        httplib2.Http.request, used to make the refresh request.
+
+    Raises:
+      AccessTokenRefreshError: When the refresh fails.
+    """
+    if not self.store:
+      self._do_refresh_request(http_request)
+    else:
+      self.store.acquire_lock()
+      try:
+        new_cred = self.store.locked_get()
+        if (new_cred and not new_cred.invalid and
+            new_cred.access_token != self.access_token):
+          logger.info('Updated access_token read from Storage')
+          self._updateFromCredential(new_cred)
+        else:
+          self._do_refresh_request(http_request)
+      finally:
+        self.store.release_lock()
+
+  def _do_refresh_request(self, http_request):
+    """Refresh the access_token using the refresh_token.
+
+    Args:
+      http_request: callable, a callable that matches the method signature of
+        httplib2.Http.request, used to make the refresh request.
+
+    Raises:
+      AccessTokenRefreshError: When the refresh fails.
+    """
+    body = self._generate_refresh_request_body()
+    headers = self._generate_refresh_request_headers()
+
+    logger.info('Refreshing access_token')
+    resp, content = http_request(
+        self.token_uri, method='POST', body=body, headers=headers)
+    if resp.status == 200:
+      # TODO(jcgregorio) Raise an error if loads fails?
+      d = simplejson.loads(content)
+      self.token_response = d
+      self.access_token = d['access_token']
+      self.refresh_token = d.get('refresh_token', self.refresh_token)
+      if 'expires_in' in d:
+        self.token_expiry = datetime.timedelta(
+            seconds=int(d['expires_in'])) + datetime.datetime.utcnow()
+      else:
+        self.token_expiry = None
+      if self.store:
+        self.store.locked_put(self)
+    else:
+      # An {'error':...} response body means the token is expired or revoked,
+      # so we flag the credentials as such.
+      logger.info('Failed to retrieve access token: %s' % content)
+      error_msg = 'Invalid response %s.' % resp['status']
+      try:
+        d = simplejson.loads(content)
+        if 'error' in d:
+          error_msg = d['error']
+          self.invalid = True
+          if self.store:
+            self.store.locked_put(self)
+      except StandardError:
+        pass
+      raise AccessTokenRefreshError(error_msg)
+
+  def _revoke(self, http_request):
+    """Revokes the refresh_token and deletes the store if available.
+
+    Args:
+      http_request: callable, a callable that matches the method signature of
+        httplib2.Http.request, used to make the revoke request.
+    """
+    self._do_revoke(http_request, self.refresh_token)
+
+  def _do_revoke(self, http_request, token):
+    """Revokes the credentials and deletes the store if available.
+
+    Args:
+      http_request: callable, a callable that matches the method signature of
+        httplib2.Http.request, used to make the refresh request.
+      token: A string used as the token to be revoked. Can be either an
+        access_token or refresh_token.
+
+    Raises:
+      TokenRevokeError: If the revoke request does not return with a 200 OK.
+    """
+    logger.info('Revoking token')
+    query_params = {'token': token}
+    token_revoke_uri = _update_query_params(self.revoke_uri, query_params)
+    resp, content = http_request(token_revoke_uri)
+    if resp.status == 200:
+      self.invalid = True
+    else:
+      error_msg = 'Invalid response %s.' % resp.status
+      try:
+        d = simplejson.loads(content)
+        if 'error' in d:
+          error_msg = d['error']
+      except StandardError:
+        pass
+      raise TokenRevokeError(error_msg)
+
+    if self.store:
+      self.store.delete()
+
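+
+# Illustrative sketch (not part of the SDK): OAuth2Credentials survive a
+# to_json()/new_from_json() round trip; the field values are hypothetical.
+def _example_json_round_trip():
+  creds = OAuth2Credentials('access-token', 'client-id', 'client-secret',
+                            'refresh-token', None, GOOGLE_TOKEN_URI,
+                            'my-app/1.0')
+  return Credentials.new_from_json(creds.to_json())
+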
+
+class AccessTokenCredentials(OAuth2Credentials):
+  """Credentials object for OAuth 2.0.
+
+  Credentials can be applied to an httplib2.Http object using the
+  authorize() method, which then signs each request from that object
+  with the OAuth 2.0 access token. This set of credentials is for the
+  use case where you have acquired an OAuth 2.0 access_token from
+  another place such as a JavaScript client or another web
+  application, and wish to use it from Python. Because only the
+  access_token is present it cannot be refreshed and will in time
+  expire.
+
+  AccessTokenCredentials objects may be safely pickled and unpickled.
+
+  Usage:
+    credentials = AccessTokenCredentials('<an access token>',
+      'my-user-agent/1.0')
+    http = httplib2.Http()
+    http = credentials.authorize(http)
+
+  Exceptions:
+    AccessTokenCredentialsError: raised when the access_token expires or is
+      revoked.
+  """
+
+  def __init__(self, access_token, user_agent, revoke_uri=None):
+    """Create an instance of OAuth2Credentials
+
+    This is one of the few types if Credentials that you should contrust,
+    Credentials objects are usually instantiated by a Flow.
+
+    Args:
+      access_token: string, access token.
+      user_agent: string, The HTTP User-Agent to provide for this application.
+      revoke_uri: string, URI for revoke endpoint. Defaults to None; a token
+        can't be revoked if this is None.
+    """
+    super(AccessTokenCredentials, self).__init__(
+        access_token,
+        None,
+        None,
+        None,
+        None,
+        None,
+        user_agent,
+        revoke_uri=revoke_uri)
+
+  @classmethod
+  def from_json(cls, s):
+    data = simplejson.loads(s)
+    retval = AccessTokenCredentials(
+        data['access_token'],
+        data['user_agent'])
+    return retval
+
+  def _refresh(self, http_request):
+    raise AccessTokenCredentialsError(
+        'The access_token is expired or invalid and can\'t be refreshed.')
+
+  def _revoke(self, http_request):
+    """Revokes the access_token and deletes the store if available.
+
+    Args:
+      http_request: callable, a callable that matches the method signature of
+        httplib2.Http.request, used to make the revoke request.
+    """
+    self._do_revoke(http_request, self.access_token)
+
+
+class AssertionCredentials(OAuth2Credentials):
+  """Abstract Credentials object used for OAuth 2.0 assertion grants.
+
+  This credential does not require a flow to instantiate because it
+  represents a two legged flow, and therefore has all of the required
+  information to generate and refresh its own access tokens. It must
+  be subclassed to generate the appropriate assertion string.
+
+  AssertionCredentials objects may be safely pickled and unpickled.
+  """
+
+  @util.positional(2)
+  def __init__(self, assertion_type, user_agent=None,
+               token_uri=GOOGLE_TOKEN_URI,
+               revoke_uri=GOOGLE_REVOKE_URI,
+               **unused_kwargs):
+    """Constructor for AssertionFlowCredentials.
+
+    Args:
+      assertion_type: string, assertion type that will be declared to the auth
+        server
+      user_agent: string, The HTTP User-Agent to provide for this application.
+      token_uri: string, URI for token endpoint. For convenience
+        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
+      revoke_uri: string, URI for revoke endpoint.
+    """
+    super(AssertionCredentials, self).__init__(
+        None,
+        None,
+        None,
+        None,
+        None,
+        token_uri,
+        user_agent,
+        revoke_uri=revoke_uri)
+    self.assertion_type = assertion_type
+
+  def _generate_refresh_request_body(self):
+    assertion = self._generate_assertion()
+
+    body = urllib.urlencode({
+        'assertion': assertion,
+        'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
+        })
+
+    return body
+
+  def _generate_assertion(self):
+    """Generate the assertion string that will be used in the access token
+    request.
+    """
+    _abstract()
+
+  def _revoke(self, http_request):
+    """Revokes the access_token and deletes the store if available.
+
+    Args:
+      http_request: callable, a callable that matches the method signature of
+        httplib2.Http.request, used to make the revoke request.
+    """
+    self._do_revoke(http_request, self.access_token)
+
+
+if HAS_CRYPTO:
+  # PyOpenSSL and PyCrypto are not prerequisites for oauth2client, so if they
+  # are missing then don't create the SignedJwtAssertionCredentials or the
+  # verify_id_token() method.
+
+  class SignedJwtAssertionCredentials(AssertionCredentials):
+    """Credentials object used for OAuth 2.0 Signed JWT assertion grants.
+
+    This credential does not require a flow to instantiate because it represents
+    a two legged flow, and therefore has all of the required information to
+    generate and refresh its own access tokens.
+
+    SignedJwtAssertionCredentials requires either PyOpenSSL, or PyCrypto 2.6 or
+    later. For App Engine you may also consider using AppAssertionCredentials.
+    """
+
+    MAX_TOKEN_LIFETIME_SECS = 3600 # 1 hour in seconds
+
+    @util.positional(4)
+    def __init__(self,
+        service_account_name,
+        private_key,
+        scope,
+        private_key_password='notasecret',
+        user_agent=None,
+        token_uri=GOOGLE_TOKEN_URI,
+        revoke_uri=GOOGLE_REVOKE_URI,
+        **kwargs):
+      """Constructor for SignedJwtAssertionCredentials.
+
+      Args:
+        service_account_name: string, id for account, usually an email address.
+        private_key: string, private key in PKCS12 or PEM format.
+        scope: string or iterable of strings, scope(s) of the credentials being
+          requested.
+        private_key_password: string, password for private_key, unused if
+          private_key is in PEM format.
+        user_agent: string, HTTP User-Agent to provide for this application.
+        token_uri: string, URI for token endpoint. For convenience
+          defaults to Google's endpoints but any OAuth 2.0 provider can be used.
+        revoke_uri: string, URI for revoke endpoint.
+        kwargs: kwargs, Additional parameters to add to the JWT token, for
+          example prn=joe@example.org."""
+
+      super(SignedJwtAssertionCredentials, self).__init__(
+          None,
+          user_agent=user_agent,
+          token_uri=token_uri,
+          revoke_uri=revoke_uri,
+          )
+
+      self.scope = util.scopes_to_string(scope)
+
+      # Keep base64 encoded so it can be stored in JSON.
+      self.private_key = base64.b64encode(private_key)
+
+      self.private_key_password = private_key_password
+      self.service_account_name = service_account_name
+      self.kwargs = kwargs
+
+    @classmethod
+    def from_json(cls, s):
+      data = simplejson.loads(s)
+      retval = SignedJwtAssertionCredentials(
+          data['service_account_name'],
+          base64.b64decode(data['private_key']),
+          data['scope'],
+          private_key_password=data['private_key_password'],
+          user_agent=data['user_agent'],
+          token_uri=data['token_uri'],
+          **data['kwargs']
+          )
+      retval.invalid = data['invalid']
+      retval.access_token = data['access_token']
+      return retval
+
+    def _generate_assertion(self):
+      """Generate the assertion that will be used in the request."""
+      now = long(time.time())
+      payload = {
+          'aud': self.token_uri,
+          'scope': self.scope,
+          'iat': now,
+          'exp': now + SignedJwtAssertionCredentials.MAX_TOKEN_LIFETIME_SECS,
+          'iss': self.service_account_name
+      }
+      payload.update(self.kwargs)
+      logger.debug(str(payload))
+
+      private_key = base64.b64decode(self.private_key)
+      return crypt.make_signed_jwt(crypt.Signer.from_string(
+          private_key, self.private_key_password), payload)
+
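+  # Illustrative sketch (not part of the SDK): building service account
+  # credentials from a PKCS12 key downloaded from the APIs Console; the
+  # account name, key file and scope are hypothetical.
+  def _example_signed_jwt_credentials():
+    private_key = open('key.p12', 'rb').read()
+    credentials = SignedJwtAssertionCredentials(
+        '1234@developer.gserviceaccount.com',
+        private_key,
+        scope='https://www.googleapis.com/auth/devstorage.read_only')
+    return credentials.authorize(httplib2.Http())
+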
+  # Only used in verify_id_token(), which is always calling to the same URI
+  # for the certs.
+  _cached_http = httplib2.Http(MemoryCache())
+
+  @util.positional(2)
+  def verify_id_token(id_token, audience, http=None,
+      cert_uri=ID_TOKEN_VERIFICATON_CERTS):
+    """Verifies a signed JWT id_token.
+
+    This function requires PyOpenSSL and because of that it does not work on
+    App Engine.
+
+    Args:
+      id_token: string, A Signed JWT.
+      audience: string, The audience 'aud' that the token should be for.
+      http: httplib2.Http, instance to use to make the HTTP request. Callers
+        should supply an instance that has caching enabled.
+      cert_uri: string, URI of the certificates in JSON format to
+        verify the JWT against.
+
+    Returns:
+      The deserialized JSON in the JWT.
+
+    Raises:
+      oauth2client.crypt.AppIdentityError if the JWT fails to verify.
+    """
+    if http is None:
+      http = _cached_http
+
+    resp, content = http.request(cert_uri)
+
+    if resp.status == 200:
+      certs = simplejson.loads(content)
+      return crypt.verify_signed_jwt_with_certs(id_token, certs, audience)
+    else:
+      raise VerifyJwtTokenError('Status code: %d' % resp.status)
+
+
+def _urlsafe_b64decode(b64string):
+  # Guard against unicode strings, which base64 can't handle.
+  b64string = b64string.encode('ascii')
+  padded = b64string + '=' * (4 - len(b64string) % 4)
+  return base64.urlsafe_b64decode(padded)
+
+
+def _extract_id_token(id_token):
+  """Extract the JSON payload from a JWT.
+
+  Does the extraction w/o checking the signature.
+
+  Args:
+    id_token: string, OAuth 2.0 id_token.
+
+  Returns:
+    object, The deserialized JSON payload.
+  """
+  segments = id_token.split('.')
+
+  if (len(segments) != 3):
+    raise VerifyJwtTokenError(
+      'Wrong number of segments in token: %s' % id_token)
+
+  return simplejson.loads(_urlsafe_b64decode(segments[1]))
+
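+
+# Illustrative sketch (not part of the SDK): a JWT is three '.'-separated
+# base64url segments; only the payload (middle) segment is decoded, with no
+# signature check. 'e30' is the base64url encoding of '{}'.
+def _example_extract_id_token():
+  return _extract_id_token('e30.e30.sig')  # Returns {}.
+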
+
+def _parse_exchange_token_response(content):
+  """Parses response of an exchange token request.
+
+  Most providers return JSON but some (e.g. Facebook) return a
+  url-encoded string.
+
+  Args:
+    content: The body of a response
+
+  Returns:
+    Content as a dictionary object. Note that the dict could be empty,
+    i.e. {}. That basically indicates a failure.
+  """
+  resp = {}
+  try:
+    resp = simplejson.loads(content)
+  except StandardError:
+    # different JSON libs raise different exceptions,
+    # so we just do a catch-all here
+    resp = dict(parse_qsl(content))
+
+  # some providers respond with 'expires', others with 'expires_in'
+  if resp and 'expires' in resp:
+    resp['expires_in'] = resp.pop('expires')
+
+  return resp
+
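+
+# Illustrative sketch (not part of the SDK): JSON and url-encoded token
+# responses normalize to the same dict shape, with 'expires' renamed to
+# 'expires_in'.
+def _example_parse_token_responses():
+  as_json = _parse_exchange_token_response('{"access_token": "t"}')
+  as_form = _parse_exchange_token_response('access_token=t&expires=3600')
+  return as_json, as_form  # Both contain 'access_token'.
+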
+
+@util.positional(4)
+def credentials_from_code(client_id, client_secret, scope, code,
+                          redirect_uri='postmessage', http=None,
+                          user_agent=None, token_uri=GOOGLE_TOKEN_URI,
+                          auth_uri=GOOGLE_AUTH_URI,
+                          revoke_uri=GOOGLE_REVOKE_URI):
+  """Exchanges an authorization code for an OAuth2Credentials object.
+
+  Args:
+    client_id: string, client identifier.
+    client_secret: string, client secret.
+    scope: string or iterable of strings, scope(s) to request.
+    code: string, An authorization code, most likely passed down from
+      the client
+    redirect_uri: string, this is generally set to 'postmessage' to match the
+      redirect_uri that the client specified
+    http: httplib2.Http, optional http instance to use to do the fetch
+    token_uri: string, URI for token endpoint. For convenience
+      defaults to Google's endpoints but any OAuth 2.0 provider can be used.
+    auth_uri: string, URI for authorization endpoint. For convenience
+      defaults to Google's endpoints but any OAuth 2.0 provider can be used.
+    revoke_uri: string, URI for revoke endpoint. For convenience
+      defaults to Google's endpoints but any OAuth 2.0 provider can be used.
+
+  Returns:
+    An OAuth2Credentials object.
+
+  Raises:
+    FlowExchangeError if the authorization code cannot be exchanged for an
+     access token
+  """
+  flow = OAuth2WebServerFlow(client_id, client_secret, scope,
+                             redirect_uri=redirect_uri, user_agent=user_agent,
+                             auth_uri=auth_uri, token_uri=token_uri,
+                             revoke_uri=revoke_uri)
+
+  credentials = flow.step2_exchange(code, http=http)
+  return credentials
+
+
+@util.positional(3)
+def credentials_from_clientsecrets_and_code(filename, scope, code,
+                                            message = None,
+                                            redirect_uri='postmessage',
+                                            http=None,
+                                            cache=None):
+  """Returns OAuth2Credentials from a clientsecrets file and an auth code.
+
+  Will create the right kind of Flow based on the contents of the clientsecrets
+  file or will raise InvalidClientSecretsError for unknown types of Flows.
+
+  Args:
+    filename: string, File name of clientsecrets.
+    scope: string or iterable of strings, scope(s) to request.
+    code: string, An authorization code, most likely passed down from
+      the client
+    message: string, A friendly string to display to the user if the
+      clientsecrets file is missing or invalid. If message is provided then
+      sys.exit will be called in the case of an error. If message is not
+      provided then clientsecrets.InvalidClientSecretsError will be raised.
+    redirect_uri: string, this is generally set to 'postmessage' to match the
+      redirect_uri that the client specified
+    http: httplib2.Http, optional http instance to use to do the fetch
+    cache: An optional cache service client that implements get() and set()
+      methods. See clientsecrets.loadfile() for details.
+
+  Returns:
+    An OAuth2Credentials object.
+
+  Raises:
+    FlowExchangeError if the authorization code cannot be exchanged for an
+     access token
+    UnknownClientSecretsFlowError if the file describes an unknown kind of Flow.
+    clientsecrets.InvalidClientSecretsError if the clientsecrets file is
+      invalid.
+  """
+  flow = flow_from_clientsecrets(filename, scope, message=message, cache=cache,
+                                 redirect_uri=redirect_uri)
+  credentials = flow.step2_exchange(code, http=http)
+  return credentials
+
+
+class OAuth2WebServerFlow(Flow):
+  """Does the Web Server Flow for OAuth 2.0.
+
+  OAuth2WebServerFlow objects may be safely pickled and unpickled.
+  """
+
+  @util.positional(4)
+  def __init__(self, client_id, client_secret, scope,
+               redirect_uri=None,
+               user_agent=None,
+               auth_uri=GOOGLE_AUTH_URI,
+               token_uri=GOOGLE_TOKEN_URI,
+               revoke_uri=GOOGLE_REVOKE_URI,
+               **kwargs):
+    """Constructor for OAuth2WebServerFlow.
+
+    The kwargs argument is used to set extra query parameters on the
+    auth_uri. For example, the access_type and approval_prompt
+    query parameters can be set via kwargs.
+
+    Args:
+      client_id: string, client identifier.
+      client_secret: string client secret.
+      scope: string or iterable of strings, scope(s) of the credentials being
+        requested.
+      redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for
+        a non-web-based application, or a URI that handles the callback from
+        the authorization server.
+      user_agent: string, HTTP User-Agent to provide for this application.
+      auth_uri: string, URI for authorization endpoint. For convenience
+        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
+      token_uri: string, URI for token endpoint. For convenience
+        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
+      revoke_uri: string, URI for revoke endpoint. For convenience
+        defaults to Google's endpoints but any OAuth 2.0 provider can be used.
+      **kwargs: dict, Extra query parameters to set on the auth_uri, for
+        example access_type or approval_prompt.
+    """
+    self.client_id = client_id
+    self.client_secret = client_secret
+    self.scope = util.scopes_to_string(scope)
+    self.redirect_uri = redirect_uri
+    self.user_agent = user_agent
+    self.auth_uri = auth_uri
+    self.token_uri = token_uri
+    self.revoke_uri = revoke_uri
+    self.params = {
+        'access_type': 'offline',
+        'response_type': 'code',
+    }
+    self.params.update(kwargs)
+
+  @util.positional(1)
+  def step1_get_authorize_url(self, redirect_uri=None):
+    """Returns a URI to redirect to the provider.
+
+    Args:
+      redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for
+        a non-web-based application, or a URI that handles the callback from
+        the authorization server. This parameter is deprecated; please move to
+        passing the redirect_uri in via the constructor.
+
+    Returns:
+      A URI as a string to redirect the user to begin the authorization flow.
+    """
+    if redirect_uri is not None:
+      logger.warning(('The redirect_uri parameter for '
+          'OAuth2WebServerFlow.step1_get_authorize_url is deprecated. Please '
+          'move to passing the redirect_uri in via the constructor.'))
+      self.redirect_uri = redirect_uri
+
+    if self.redirect_uri is None:
+      raise ValueError('The value of redirect_uri must not be None.')
+
+    query_params = {
+        'client_id': self.client_id,
+        'redirect_uri': self.redirect_uri,
+        'scope': self.scope,
+    }
+    query_params.update(self.params)
+    return _update_query_params(self.auth_uri, query_params)
+
+  @util.positional(2)
+  def step2_exchange(self, code, http=None):
+    """Exhanges a code for OAuth2Credentials.
+
+    Args:
+      code: string or dict, either the code as a string, or a dictionary
+        of the query parameters to the redirect_uri, which contains
+        the code.
+      http: httplib2.Http, optional http instance to use to do the fetch
+
+    Returns:
+      An OAuth2Credentials object that can be used to authorize requests.
+
+    Raises:
+      FlowExchangeError if a problem occurred exchanging the code for a
+      refresh_token.
+    """
+
+    if not isinstance(code, basestring):
+      if 'code' not in code:
+        if 'error' in code:
+          error_msg = code['error']
+        else:
+          error_msg = 'No code was supplied in the query parameters.'
+        raise FlowExchangeError(error_msg)
+      else:
+        code = code['code']
+
+    body = urllib.urlencode({
+        'grant_type': 'authorization_code',
+        'client_id': self.client_id,
+        'client_secret': self.client_secret,
+        'code': code,
+        'redirect_uri': self.redirect_uri,
+        'scope': self.scope,
+        })
+    headers = {
+        'content-type': 'application/x-www-form-urlencoded',
+    }
+
+    if self.user_agent is not None:
+      headers['user-agent'] = self.user_agent
+
+    if http is None:
+      http = httplib2.Http()
+
+    resp, content = http.request(self.token_uri, method='POST', body=body,
+                                 headers=headers)
+    d = _parse_exchange_token_response(content)
+    if resp.status == 200 and 'access_token' in d:
+      access_token = d['access_token']
+      refresh_token = d.get('refresh_token', None)
+      token_expiry = None
+      if 'expires_in' in d:
+        token_expiry = datetime.datetime.utcnow() + datetime.timedelta(
+            seconds=int(d['expires_in']))
+
+      if 'id_token' in d:
+        d['id_token'] = _extract_id_token(d['id_token'])
+
+      logger.info('Successfully retrieved access token')
+      return OAuth2Credentials(access_token, self.client_id,
+                               self.client_secret, refresh_token, token_expiry,
+                               self.token_uri, self.user_agent,
+                               revoke_uri=self.revoke_uri,
+                               id_token=d.get('id_token', None),
+                               token_response=d)
+    else:
+      logger.info('Failed to retrieve access token: %s' % content)
+      if 'error' in d:
+        # you never know what those providers have to say
+        error_msg = unicode(d['error'])
+      else:
+        error_msg = 'Invalid response: %s.' % str(resp.status)
+      raise FlowExchangeError(error_msg)
+
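+
+# Illustrative sketch (not part of the SDK): the two steps of the dance with
+# hypothetical client values. 'code' is the query parameter the provider
+# sends to redirect_uri once the user grants access.
+def _example_web_server_flow(code):
+  flow = OAuth2WebServerFlow('client-id', 'client-secret',
+                             'https://www.googleapis.com/auth/plus',
+                             redirect_uri='https://example.com/oauth2callback')
+  authorize_url = flow.step1_get_authorize_url()
+  # ...redirect the user to authorize_url and wait for the callback...
+  return flow.step2_exchange(code)
+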
+
+@util.positional(2)
+def flow_from_clientsecrets(filename, scope, redirect_uri=None,
+                            message=None, cache=None):
+  """Create a Flow from a clientsecrets file.
+
+  Will create the right kind of Flow based on the contents of the clientsecrets
+  file or will raise InvalidClientSecretsError for unknown types of Flows.
+
+  Args:
+    filename: string, File name of client secrets.
+    scope: string or iterable of strings, scope(s) to request.
+    redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for
+      a non-web-based application, or a URI that handles the callback from
+      the authorization server.
+    message: string, A friendly string to display to the user if the
+      clientsecrets file is missing or invalid. If message is provided then
+      sys.exit will be called in the case of an error. If message is not
+      provided then clientsecrets.InvalidClientSecretsError will be raised.
+    cache: An optional cache service client that implements get() and set()
+      methods. See clientsecrets.loadfile() for details.
+
+  Returns:
+    A Flow object.
+
+  Raises:
+    UnknownClientSecretsFlowError if the file describes an unknown kind of Flow.
+    clientsecrets.InvalidClientSecretsError if the clientsecrets file is
+      invalid.
+  """
+  try:
+    client_type, client_info = clientsecrets.loadfile(filename, cache=cache)
+    if client_type in (clientsecrets.TYPE_WEB, clientsecrets.TYPE_INSTALLED):
+      constructor_kwargs = {
+          'redirect_uri': redirect_uri,
+          'auth_uri': client_info['auth_uri'],
+          'token_uri': client_info['token_uri'],
+      }
+      revoke_uri = client_info.get('revoke_uri')
+      if revoke_uri is not None:
+        constructor_kwargs['revoke_uri'] = revoke_uri
+      return OAuth2WebServerFlow(
+          client_info['client_id'], client_info['client_secret'],
+          scope, **constructor_kwargs)
+
+  except clientsecrets.InvalidClientSecretsError:
+    if message:
+      sys.exit(message)
+    else:
+      raise
+  else:
+    raise UnknownClientSecretsFlowError(
+        'This OAuth 2.0 flow is unsupported: %r' % client_type)
diff --git a/oauth2client/clientsecrets.py b/oauth2client/clientsecrets.py
new file mode 100644
index 0000000..ac99aae
--- /dev/null
+++ b/oauth2client/clientsecrets.py
@@ -0,0 +1,153 @@
+# Copyright (C) 2011 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for reading OAuth 2.0 client secret files.
+
+A client_secrets.json file contains all the information needed to interact with
+an OAuth 2.0 protected service.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+
+from anyjson import simplejson
+
+# Properties that make a client_secrets.json file valid.
+TYPE_WEB = 'web'
+TYPE_INSTALLED = 'installed'
+
+VALID_CLIENT = {
+    TYPE_WEB: {
+        'required': [
+            'client_id',
+            'client_secret',
+            'redirect_uris',
+            'auth_uri',
+            'token_uri',
+        ],
+        'string': [
+            'client_id',
+            'client_secret',
+        ],
+    },
+    TYPE_INSTALLED: {
+        'required': [
+            'client_id',
+            'client_secret',
+            'redirect_uris',
+            'auth_uri',
+            'token_uri',
+        ],
+        'string': [
+            'client_id',
+            'client_secret',
+        ],
+    },
+}
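+
+# An illustrative minimal client_secrets.json that satisfies the validation
+# below (all values are placeholders, not working credentials):
+#
+#   {
+#     "web": {
+#       "client_id": "example.apps.googleusercontent.com",
+#       "client_secret": "example-secret",
+#       "redirect_uris": ["https://www.example.com/oauth2callback"],
+#       "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+#       "token_uri": "https://accounts.google.com/o/oauth2/token"
+#     }
+#   }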
+
+
+class Error(Exception):
+  """Base error for this module."""
+  pass
+
+
+class InvalidClientSecretsError(Error):
+  """Format of ClientSecrets file is invalid."""
+  pass
+
+
+def _validate_clientsecrets(obj):
+  if obj is None or len(obj) != 1:
+    raise InvalidClientSecretsError('Invalid file format.')
+  client_type = obj.keys()[0]
+  if client_type not in VALID_CLIENT.keys():
+    raise InvalidClientSecretsError('Unknown client type: %s.' % client_type)
+  client_info = obj[client_type]
+  for prop_name in VALID_CLIENT[client_type]['required']:
+    if prop_name not in client_info:
+      raise InvalidClientSecretsError(
+        'Missing property "%s" in a client type of "%s".' % (
+            prop_name, client_type))
+  for prop_name in VALID_CLIENT[client_type]['string']:
+    if client_info[prop_name].startswith('[['):
+      raise InvalidClientSecretsError(
+        'Property "%s" is not configured.' % prop_name)
+  return client_type, client_info
+
+
+def load(fp):
+  obj = simplejson.load(fp)
+  return _validate_clientsecrets(obj)
+
+
+def loads(s):
+  obj = simplejson.loads(s)
+  return _validate_clientsecrets(obj)
+
+
+def _loadfile(filename):
+  try:
+    fp = file(filename, 'r')
+    try:
+      obj = simplejson.load(fp)
+    finally:
+      fp.close()
+  except IOError:
+    raise InvalidClientSecretsError('File not found: "%s"' % filename)
+  return _validate_clientsecrets(obj)
+
+
+def loadfile(filename, cache=None):
+  """Loading of client_secrets JSON file, optionally backed by a cache.
+
+  Typical cache storage would be App Engine memcache service,
+  but you can pass in any other cache client that implements
+  these methods:
+    - get(key, namespace=ns)
+    - set(key, value, namespace=ns)
+
+  Usage:
+    # without caching
+    client_type, client_info = loadfile('secrets.json')
+    # using App Engine memcache service
+    from google.appengine.api import memcache
+    client_type, client_info = loadfile('secrets.json', cache=memcache)
+
+  Args:
+    filename: string, Path to a client_secrets.json file on a filesystem.
+    cache: An optional cache service client that implements get() and set()
+      methods. If not specified, the file is always loaded from the
+      filesystem.
+
+  Raises:
+    InvalidClientSecretsError: In case of a validation error or some
+      I/O failure. Can happen only on cache miss.
+
+  Returns:
+    (client_type, client_info) tuple, as _loadfile() normally would.
+    The JSON contents are validated only on the first load; cache hits
+    are not validated.
+  """
+  _SECRET_NAMESPACE = 'oauth2client:secrets#ns'
+
+  if not cache:
+    return _loadfile(filename)
+
+  obj = cache.get(filename, namespace=_SECRET_NAMESPACE)
+  if obj is None:
+    client_type, client_info = _loadfile(filename)
+    obj = {client_type: client_info}
+    cache.set(filename, obj, namespace=_SECRET_NAMESPACE)
+
+  return obj.iteritems().next()
diff --git a/oauth2client/crypt.py b/oauth2client/crypt.py
new file mode 100644
index 0000000..2d31815
--- /dev/null
+++ b/oauth2client/crypt.py
@@ -0,0 +1,377 @@
+#!/usr/bin/python2.4
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2011 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+import hashlib
+import logging
+import time
+
+from anyjson import simplejson
+
+
+CLOCK_SKEW_SECS = 300  # 5 minutes in seconds
+AUTH_TOKEN_LIFETIME_SECS = 300  # 5 minutes in seconds
+MAX_TOKEN_LIFETIME_SECS = 86400  # 1 day in seconds
+
+
+logger = logging.getLogger(__name__)
+
+
+class AppIdentityError(Exception):
+  pass
+
+
+try:
+  from OpenSSL import crypto
+
+
+  class OpenSSLVerifier(object):
+    """Verifies the signature on a message."""
+
+    def __init__(self, pubkey):
+      """Constructor.
+
+      Args:
+        pubkey: OpenSSL.crypto.PKey, The public key to verify with.
+      """
+      self._pubkey = pubkey
+
+    def verify(self, message, signature):
+      """Verifies a message against a signature.
+
+      Args:
+        message: string, The message to verify.
+        signature: string, The signature on the message.
+
+      Returns:
+        True if message was signed by the private key associated with the public
+        key that this object was constructed with.
+      """
+      try:
+        crypto.verify(self._pubkey, signature, message, 'sha256')
+        return True
+      except:
+        return False
+
+    @staticmethod
+    def from_string(key_pem, is_x509_cert):
+      """Construct a Verified instance from a string.
+
+      Args:
+        key_pem: string, public key in PEM format.
+        is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it is
+          expected to be an RSA key in PEM format.
+
+      Returns:
+        Verifier instance.
+
+      Raises:
+        OpenSSL.crypto.Error if the key_pem can't be parsed.
+      """
+      if is_x509_cert:
+        pubkey = crypto.load_certificate(crypto.FILETYPE_PEM, key_pem)
+      else:
+        pubkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key_pem)
+      return OpenSSLVerifier(pubkey)
+
+
+  class OpenSSLSigner(object):
+    """Signs messages with a private key."""
+
+    def __init__(self, pkey):
+      """Constructor.
+
+      Args:
+        pkey: OpenSSL.crypto.PKey (or equiv), The private key to sign with.
+      """
+      self._key = pkey
+
+    def sign(self, message):
+      """Signs a message.
+
+      Args:
+        message: string, Message to be signed.
+
+      Returns:
+        string, The signature of the message for the given key.
+      """
+      return crypto.sign(self._key, message, 'sha256')
+
+    @staticmethod
+    def from_string(key, password='notasecret'):
+      """Construct a Signer instance from a string.
+
+      Args:
+        key: string, private key in PKCS12 or PEM format.
+        password: string, password for the private key file.
+
+      Returns:
+        Signer instance.
+
+      Raises:
+        OpenSSL.crypto.Error if the key can't be parsed.
+      """
+      if key.startswith('-----BEGIN '):
+        pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key)
+      else:
+        pkey = crypto.load_pkcs12(key, password).get_privatekey()
+      return OpenSSLSigner(pkey)
+
+except ImportError:
+  OpenSSLVerifier = None
+  OpenSSLSigner = None
+
+
+try:
+  from Crypto.PublicKey import RSA
+  from Crypto.Hash import SHA256
+  from Crypto.Signature import PKCS1_v1_5
+
+
+  class PyCryptoVerifier(object):
+    """Verifies the signature on a message."""
+
+    def __init__(self, pubkey):
+      """Constructor.
+
+      Args:
+        pubkey: OpenSSL.crypto.PKey (or equiv), The public key to verify with.
+      """
+      self._pubkey = pubkey
+
+    def verify(self, message, signature):
+      """Verifies a message against a signature.
+
+      Args:
+        message: string, The message to verify.
+        signature: string, The signature on the message.
+
+      Returns:
+        True if message was signed by the private key associated with the public
+        key that this object was constructed with.
+      """
+      try:
+        return PKCS1_v1_5.new(self._pubkey).verify(
+            SHA256.new(message), signature)
+      except:
+        return False
+
+    @staticmethod
+    def from_string(key_pem, is_x509_cert):
+      """Construct a Verified instance from a string.
+
+      Args:
+        key_pem: string, public key in PEM format.
+        is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it is
+          expected to be an RSA key in PEM format.
+
+      Returns:
+        Verifier instance.
+
+      Raises:
+        NotImplementedError if is_x509_cert is true.
+      """
+      if is_x509_cert:
+        raise NotImplementedError(
+            'X509 certs are not supported by the PyCrypto library. '
+            'Try using PyOpenSSL if native code is an option.')
+      else:
+        pubkey = RSA.importKey(key_pem)
+      return PyCryptoVerifier(pubkey)
+
+
+  class PyCryptoSigner(object):
+    """Signs messages with a private key."""
+
+    def __init__(self, pkey):
+      """Constructor.
+
+      Args:
+        pkey: OpenSSL.crypto.PKey (or equiv), The private key to sign with.
+      """
+      self._key = pkey
+
+    def sign(self, message):
+      """Signs a message.
+
+      Args:
+        message: string, Message to be signed.
+
+      Returns:
+        string, The signature of the message for the given key.
+      """
+      return PKCS1_v1_5.new(self._key).sign(SHA256.new(message))
+
+    @staticmethod
+    def from_string(key, password='notasecret'):
+      """Construct a Signer instance from a string.
+
+      Args:
+        key: string, private key in PEM format.
+        password: string, password for private key file. Unused for PEM files.
+
+      Returns:
+        Signer instance.
+
+      Raises:
+        NotImplementedError if the key isn't in PEM format.
+      """
+      if key.startswith('-----BEGIN '):
+        pkey = RSA.importKey(key)
+      else:
+        raise NotImplementedError(
+            'PKCS12 format is not supported by the PyCrypto library. '
+            'Try converting to a "PEM" '
+            '(openssl pkcs12 -in xxxxx.p12 -nodes -nocerts > privatekey.pem) '
+            'or using PyOpenSSL if native code is an option.')
+      return PyCryptoSigner(pkey)
+
+except ImportError:
+  PyCryptoVerifier = None
+  PyCryptoSigner = None
+
+
+if OpenSSLSigner:
+  Signer = OpenSSLSigner
+  Verifier = OpenSSLVerifier
+elif PyCryptoSigner:
+  Signer = PyCryptoSigner
+  Verifier = PyCryptoVerifier
+else:
+  raise ImportError('No encryption library found. Please install either '
+                    'PyOpenSSL, or PyCrypto 2.6 or later')
+
+
+def _urlsafe_b64encode(raw_bytes):
+  return base64.urlsafe_b64encode(raw_bytes).rstrip('=')
+
+
+def _urlsafe_b64decode(b64string):
+  # Guard against unicode strings, which base64 can't handle.
+  b64string = b64string.encode('ascii')
+  padded = b64string + '=' * (4 - len(b64string) % 4)
+  return base64.urlsafe_b64decode(padded)
+
+
+def _json_encode(data):
+  return simplejson.dumps(data, separators = (',', ':'))
+
+
+def make_signed_jwt(signer, payload):
+  """Make a signed JWT.
+
+  See http://self-issued.info/docs/draft-jones-json-web-token.html.
+
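+  Usage (a minimal sketch; 'privatekey.pem' is a placeholder for an RSA
+  private key in PEM format, and the payload fields are illustrative):
+
+    signer = Signer.from_string(open('privatekey.pem').read())
+    signed_jwt = make_signed_jwt(signer, {'iss': 'me@example.com'})
+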
+  Args:
+    signer: crypt.Signer, Cryptographic signer.
+    payload: dict, Dictionary of data to convert to JSON and then sign.
+
+  Returns:
+    string, The JWT for the payload.
+  """
+  header = {'typ': 'JWT', 'alg': 'RS256'}
+
+  segments = [
+          _urlsafe_b64encode(_json_encode(header)),
+          _urlsafe_b64encode(_json_encode(payload)),
+  ]
+  signing_input = '.'.join(segments)
+
+  signature = signer.sign(signing_input)
+  segments.append(_urlsafe_b64encode(signature))
+
+  logger.debug(str(segments))
+
+  return '.'.join(segments)
+
+
+def verify_signed_jwt_with_certs(jwt, certs, audience):
+  """Verify a JWT against public certs.
+
+  See http://self-issued.info/docs/draft-jones-json-web-token.html.
+
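+  Usage (a minimal sketch; the cert file and audience are placeholders,
+  and signed_jwt is a JWT produced by make_signed_jwt or a compatible
+  signer):
+
+    certs = {'key1': open('public_cert.pem').read()}
+    payload = verify_signed_jwt_with_certs(signed_jwt, certs, 'my-audience')
+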
+  Args:
+    jwt: string, A JWT.
+    certs: dict, Dictionary where the values are public keys in PEM format.
+    audience: string, The audience, 'aud', that this JWT should contain. If
+      None then the JWT's 'aud' parameter is not verified.
+
+  Returns:
+    dict, The deserialized JSON payload in the JWT.
+
+  Raises:
+    AppIdentityError if any checks are failed.
+  """
+  segments = jwt.split('.')
+
+  if len(segments) != 3:
+    raise AppIdentityError(
+      'Wrong number of segments in token: %s' % jwt)
+  signed = '%s.%s' % (segments[0], segments[1])
+
+  signature = _urlsafe_b64decode(segments[2])
+
+  # Parse token.
+  json_body = _urlsafe_b64decode(segments[1])
+  try:
+    parsed = simplejson.loads(json_body)
+  except:
+    raise AppIdentityError('Can\'t parse token: %s' % json_body)
+
+  # Check signature.
+  verified = False
+  for (keyname, pem) in certs.items():
+    verifier = Verifier.from_string(pem, True)
+    if verifier.verify(signed, signature):
+      verified = True
+      break
+  if not verified:
+    raise AppIdentityError('Invalid token signature: %s' % jwt)
+
+  # Check creation timestamp.
+  iat = parsed.get('iat')
+  if iat is None:
+    raise AppIdentityError('No iat field in token: %s' % json_body)
+  earliest = iat - CLOCK_SKEW_SECS
+
+  # Check expiration timestamp.
+  now = long(time.time())
+  exp = parsed.get('exp')
+  if exp is None:
+    raise AppIdentityError('No exp field in token: %s' % json_body)
+  if exp >= now + MAX_TOKEN_LIFETIME_SECS:
+    raise AppIdentityError(
+      'exp field too far in future: %s' % json_body)
+  latest = exp + CLOCK_SKEW_SECS
+
+  if now < earliest:
+    raise AppIdentityError('Token used too early, %d < %d: %s' %
+      (now, earliest, json_body))
+  if now > latest:
+    raise AppIdentityError('Token used too late, %d > %d: %s' %
+      (now, latest, json_body))
+
+  # Check audience.
+  if audience is not None:
+    aud = parsed.get('aud')
+    if aud is None:
+      raise AppIdentityError('No aud field in token: %s' % json_body)
+    if aud != audience:
+      raise AppIdentityError('Wrong recipient, %s != %s: %s' %
+          (aud, audience, json_body))
+
+  return parsed
diff --git a/oauth2client/django_orm.py b/oauth2client/django_orm.py
new file mode 100644
index 0000000..d54d20c
--- /dev/null
+++ b/oauth2client/django_orm.py
@@ -0,0 +1,134 @@
+# Copyright (C) 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""OAuth 2.0 utilities for Django.
+
+Utilities for using OAuth 2.0 in conjunction with
+the Django datastore.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import oauth2client
+import base64
+import pickle
+
+from django.db import models
+from oauth2client.client import Storage as BaseStorage
+
+class CredentialsField(models.Field):
+
+  __metaclass__ = models.SubfieldBase
+
+  def __init__(self, *args, **kwargs):
+    if 'null' not in kwargs:
+      kwargs['null'] = True
+    super(CredentialsField, self).__init__(*args, **kwargs)
+
+  def get_internal_type(self):
+    return "TextField"
+
+  def to_python(self, value):
+    if value is None:
+      return None
+    if isinstance(value, oauth2client.client.Credentials):
+      return value
+    return pickle.loads(base64.b64decode(value))
+
+  def get_db_prep_value(self, value, connection, prepared=False):
+    if value is None:
+      return None
+    return base64.b64encode(pickle.dumps(value))
+
+
+class FlowField(models.Field):
+
+  __metaclass__ = models.SubfieldBase
+
+  def __init__(self, *args, **kwargs):
+    if 'null' not in kwargs:
+      kwargs['null'] = True
+    super(FlowField, self).__init__(*args, **kwargs)
+
+  def get_internal_type(self):
+    return "TextField"
+
+  def to_python(self, value):
+    if value is None:
+      return None
+    if isinstance(value, oauth2client.client.Flow):
+      return value
+    return pickle.loads(base64.b64decode(value))
+
+  def get_db_prep_value(self, value, connection, prepared=False):
+    if value is None:
+      return None
+    return base64.b64encode(pickle.dumps(value))
+
+
+class Storage(BaseStorage):
+  """Store and retrieve a single credential to and from
+  the datastore.
+
+  This Storage helper presumes the Credentials
+  have been stored as a CredentialsField
+  on a db model class.
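+
+  Usage (a minimal sketch; CredentialsModel, User and user are hypothetical
+  names, not part of this module):
+
+    class CredentialsModel(models.Model):
+      id = models.ForeignKey(User, primary_key=True)
+      credential = CredentialsField()
+
+    storage = Storage(CredentialsModel, 'id', user, 'credential')
+    credentials = storage.get()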
+  """
+
+  def __init__(self, model_class, key_name, key_value, property_name):
+    """Constructor for Storage.
+
+    Args:
+      model_class: django.db.models.Model, model class
+      key_name: string, key name for the entity that has the credentials
+      key_value: string, key value for the entity that has the credentials
+      property_name: string, name of the property that is a CredentialsField
+    """
+    self.model_class = model_class
+    self.key_name = key_name
+    self.key_value = key_value
+    self.property_name = property_name
+
+  def locked_get(self):
+    """Retrieve Credential from datastore.
+
+    Returns:
+      oauth2client.Credentials
+    """
+    credential = None
+
+    query = {self.key_name: self.key_value}
+    entities = self.model_class.objects.filter(**query)
+    if len(entities) > 0:
+      credential = getattr(entities[0], self.property_name)
+      if credential and hasattr(credential, 'set_store'):
+        credential.set_store(self)
+    return credential
+
+  def locked_put(self, credentials):
+    """Write a Credentials to the datastore.
+
+    Args:
+      credentials: Credentials, the credentials to store.
+    """
+    args = {self.key_name: self.key_value}
+    entity = self.model_class(**args)
+    setattr(entity, self.property_name, credentials)
+    entity.save()
+
+  def locked_delete(self):
+    """Delete Credentials from the datastore."""
+
+    query = {self.key_name: self.key_value}
+    self.model_class.objects.filter(**query).delete()
diff --git a/oauth2client/file.py b/oauth2client/file.py
new file mode 100644
index 0000000..1895f94
--- /dev/null
+++ b/oauth2client/file.py
@@ -0,0 +1,124 @@
+# Copyright (C) 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for OAuth.
+
+Utilities for making it easier to work with OAuth 2.0
+credentials.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import os
+import stat
+import threading
+
+from anyjson import simplejson
+from client import Storage as BaseStorage
+from client import Credentials
+
+
+class CredentialsFileSymbolicLinkError(Exception):
+  """Credentials files must not be symbolic links."""
+
+
+class Storage(BaseStorage):
+  """Store and retrieve a single credential to and from a file."""
+
+  def __init__(self, filename):
+    self._filename = filename
+    self._lock = threading.Lock()
+
+  def _validate_file(self):
+    if os.path.islink(self._filename):
+      raise CredentialsFileSymbolicLinkError(
+          'File: %s is a symbolic link.' % self._filename)
+
+  def acquire_lock(self):
+    """Acquires any lock necessary to access this Storage.
+
+    This lock is not reentrant."""
+    self._lock.acquire()
+
+  def release_lock(self):
+    """Release the Storage lock.
+
+    Trying to release a lock that isn't held will result in a
+    RuntimeError.
+    """
+    self._lock.release()
+
+  def locked_get(self):
+    """Retrieve Credential from file.
+
+    Returns:
+      oauth2client.client.Credentials
+
+    Raises:
+      CredentialsFileSymbolicLinkError if the file is a symbolic link.
+    """
+    credentials = None
+    self._validate_file()
+    try:
+      f = open(self._filename, 'rb')
+      content = f.read()
+      f.close()
+    except IOError:
+      return credentials
+
+    try:
+      credentials = Credentials.new_from_json(content)
+      credentials.set_store(self)
+    except ValueError:
+      pass
+
+    return credentials
+
+  def _create_file_if_needed(self):
+    """Create an empty file if necessary.
+
+    This method will not initialize the file. Instead it implements a
+    simple version of "touch" to ensure the file has been created.
+    """
+    if not os.path.exists(self._filename):
+      old_umask = os.umask(0177)
+      try:
+        open(self._filename, 'a+b').close()
+      finally:
+        os.umask(old_umask)
+
+  def locked_put(self, credentials):
+    """Write Credentials to file.
+
+    Args:
+      credentials: Credentials, the credentials to store.
+
+    Raises:
+      CredentialsFileSymbolicLinkError if the file is a symbolic link.
+    """
+
+    self._create_file_if_needed()
+    self._validate_file()
+    f = open(self._filename, 'wb')
+    f.write(credentials.to_json())
+    f.close()
+
+  def locked_delete(self):
+    """Delete Credentials file.
+
+    Args:
+      credentials: Credentials, the credentials to store.
+    """
+
+    os.unlink(self._filename)
diff --git a/oauth2client/gce.py b/oauth2client/gce.py
new file mode 100644
index 0000000..c7fd7c1
--- /dev/null
+++ b/oauth2client/gce.py
@@ -0,0 +1,90 @@
+# Copyright (C) 2012 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for Google Compute Engine
+
+Utilities for making it easier to use OAuth 2.0 on Google Compute Engine.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import httplib2
+import logging
+import uritemplate
+
+from oauth2client import util
+from oauth2client.anyjson import simplejson
+from oauth2client.client import AccessTokenRefreshError
+from oauth2client.client import AssertionCredentials
+
+logger = logging.getLogger(__name__)
+
+# URI Template for the endpoint that returns access_tokens.
+META = ('http://metadata.google.internal/0.1/meta-data/service-accounts/'
+        'default/acquire{?scope}')
+
+
+class AppAssertionCredentials(AssertionCredentials):
+  """Credentials object for Compute Engine Assertion Grants
+
+  This object will allow a Compute Engine instance to identify itself to
+  Google and other OAuth 2.0 servers that can verify assertions. It can be used
+  for the purpose of accessing data stored under an account assigned to the
+  Compute Engine instance itself.
+
+  This credential does not require a flow to instantiate because it represents
+  a two-legged flow, and therefore has all of the required information to
+  generate and refresh its own access tokens.
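+
+  Usage (a minimal sketch; the scope is a placeholder):
+
+    credentials = AppAssertionCredentials(
+        scope='https://www.googleapis.com/auth/devstorage.read_only')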
+  """
+
+  @util.positional(2)
+  def __init__(self, scope, **kwargs):
+    """Constructor for AppAssertionCredentials
+
+    Args:
+      scope: string or iterable of strings, scope(s) of the credentials being
+        requested.
+    """
+    self.scope = util.scopes_to_string(scope)
+
+    # Assertion type is no longer used, but still in the parent class signature.
+    super(AppAssertionCredentials, self).__init__(None)
+
+  @classmethod
+  def from_json(cls, json):
+    data = simplejson.loads(json)
+    return AppAssertionCredentials(data['scope'])
+
+  def _refresh(self, http_request):
+    """Refreshes the access_token.
+
+    Skip all the storage hoops and just refresh using the API.
+
+    Args:
+      http_request: callable, a callable that matches the method signature of
+        httplib2.Http.request, used to make the refresh request.
+
+    Raises:
+      AccessTokenRefreshError: When the refresh fails.
+    """
+    uri = uritemplate.expand(META, {'scope': self.scope})
+    response, content = http_request(uri)
+    if response.status == 200:
+      try:
+        d = simplejson.loads(content)
+      except StandardError, e:
+        raise AccessTokenRefreshError(str(e))
+      self.access_token = d['accessToken']
+    else:
+      raise AccessTokenRefreshError(content)
diff --git a/oauth2client/keyring_storage.py b/oauth2client/keyring_storage.py
new file mode 100644
index 0000000..efe2949
--- /dev/null
+++ b/oauth2client/keyring_storage.py
@@ -0,0 +1,109 @@
+# Copyright (C) 2012 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A keyring based Storage.
+
+A Storage for Credentials that uses the keyring module.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+
+import keyring
+import threading
+
+from client import Storage as BaseStorage
+from client import Credentials
+
+
+class Storage(BaseStorage):
+  """Store and retrieve a single credential to and from the keyring.
+
+  To use this module you must have the keyring module installed. See
+  <http://pypi.python.org/pypi/keyring/>. This is an optional module and is not
+  installed with oauth2client by default because it does not work on all the
+  platforms that oauth2client supports, such as Google App Engine.
+
+  The keyring module <http://pypi.python.org/pypi/keyring/> is a cross-platform
+  library for accessing the keyring capabilities of the local system. The
+  user will be prompted for their keyring password when this module is used,
+  and the manner in which the user is prompted will vary per platform.
+
+  Usage:
+    from oauth2client.keyring_storage import Storage
+
+    s = Storage('name_of_application', 'user1')
+    credentials = s.get()
+
+  """
+
+  def __init__(self, service_name, user_name):
+    """Constructor.
+
+    Args:
+      service_name: string, The name of the service under which the credentials
+        are stored.
+      user_name: string, The name of the user to store credentials for.
+    """
+    self._service_name = service_name
+    self._user_name = user_name
+    self._lock = threading.Lock()
+
+  def acquire_lock(self):
+    """Acquires any lock necessary to access this Storage.
+
+    This lock is not reentrant."""
+    self._lock.acquire()
+
+  def release_lock(self):
+    """Release the Storage lock.
+
+    Trying to release a lock that isn't held will result in a
+    RuntimeError.
+    """
+    self._lock.release()
+
+  def locked_get(self):
+    """Retrieve Credential from file.
+
+    Returns:
+      oauth2client.client.Credentials
+    """
+    credentials = None
+    content = keyring.get_password(self._service_name, self._user_name)
+
+    if content is not None:
+      try:
+        credentials = Credentials.new_from_json(content)
+        credentials.set_store(self)
+      except ValueError:
+        pass
+
+    return credentials
+
+  def locked_put(self, credentials):
+    """Write Credentials to file.
+
+    Args:
+      credentials: Credentials, the credentials to store.
+    """
+    keyring.set_password(self._service_name, self._user_name,
+                         credentials.to_json())
+
+  def locked_delete(self):
+    """Delete Credentials file.
+
+    Args:
+      credentials: Credentials, the credentials to store.
+    """
+    keyring.set_password(self._service_name, self._user_name, '')
diff --git a/oauth2client/locked_file.py b/oauth2client/locked_file.py
new file mode 100644
index 0000000..26f783e
--- /dev/null
+++ b/oauth2client/locked_file.py
@@ -0,0 +1,361 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+"""Locked file interface that should work on Unix and Windows pythons.
+
+This module first tries to use fcntl locking to ensure serialized access
+to a file, then falls back on a lock file if that is unavailable.
+
+Usage:
+    f = LockedFile('filename', 'r+b', 'rb')
+    f.open_and_lock()
+    if f.is_locked():
+      print 'Acquired filename with r+b mode'
+      f.file_handle().write('locked data')
+    else:
+      print 'Acquired filename with rb mode'
+    f.unlock_and_close()
+"""
+
+__author__ = 'cache@google.com (David T McWherter)'
+
+import errno
+import logging
+import os
+import time
+
+from oauth2client import util
+
+logger = logging.getLogger(__name__)
+
+
+class CredentialsFileSymbolicLinkError(Exception):
+  """Credentials files must not be symbolic links."""
+
+
+class AlreadyLockedException(Exception):
+  """Trying to lock a file that has already been locked by the LockedFile."""
+  pass
+
+
+def validate_file(filename):
+  if os.path.islink(filename):
+    raise CredentialsFileSymbolicLinkError(
+        'File: %s is a symbolic link.' % filename)
+
+class _Opener(object):
+  """Base class for different locking primitives."""
+
+  def __init__(self, filename, mode, fallback_mode):
+    """Create an Opener.
+
+    Args:
+      filename: string, The pathname of the file.
+      mode: string, The preferred mode to access the file with.
+      fallback_mode: string, The mode to use if locking fails.
+    """
+    self._locked = False
+    self._filename = filename
+    self._mode = mode
+    self._fallback_mode = fallback_mode
+    self._fh = None
+
+  def is_locked(self):
+    """Was the file locked."""
+    return self._locked
+
+  def file_handle(self):
+    """The file handle to the file. Valid only after opened."""
+    return self._fh
+
+  def filename(self):
+    """The filename that is being locked."""
+    return self._filename
+
+  def open_and_lock(self, timeout, delay):
+    """Open the file and lock it.
+
+    Args:
+      timeout: float, How long to try to lock for.
+      delay: float, How long to wait between retries.
+    """
+    pass
+
+  def unlock_and_close(self):
+    """Unlock and close the file."""
+    pass
+
+
+class _PosixOpener(_Opener):
+  """Lock files using Posix advisory lock files."""
+
+  def open_and_lock(self, timeout, delay):
+    """Open the file and lock it.
+
+    Tries to create a .lock file next to the file we're trying to open.
+
+    Args:
+      timeout: float, How long to try to lock for.
+      delay: float, How long to wait between retries.
+
+    Raises:
+      AlreadyLockedException: if the lock is already acquired.
+      IOError: if the open fails.
+      CredentialsFileSymbolicLinkError if the file is a symbolic link.
+    """
+    if self._locked:
+      raise AlreadyLockedException('File %s is already locked' %
+                                   self._filename)
+    self._locked = False
+
+    validate_file(self._filename)
+    try:
+      self._fh = open(self._filename, self._mode)
+    except IOError, e:
+      # If we can't access with _mode, try _fallback_mode and don't lock.
+      if e.errno == errno.EACCES:
+        self._fh = open(self._filename, self._fallback_mode)
+        return
+
+    lock_filename = self._posix_lockfile(self._filename)
+    start_time = time.time()
+    while True:
+      try:
+        self._lock_fd = os.open(lock_filename,
+                                os.O_CREAT|os.O_EXCL|os.O_RDWR)
+        self._locked = True
+        break
+
+      except OSError, e:
+        if e.errno != errno.EEXIST:
+          raise
+        if (time.time() - start_time) >= timeout:
+          logger.warn('Could not acquire lock %s in %s seconds' % (
+              lock_filename, timeout))
+          # Close the file and open in fallback_mode.
+          if self._fh:
+            self._fh.close()
+          self._fh = open(self._filename, self._fallback_mode)
+          return
+        time.sleep(delay)
+
+  def unlock_and_close(self):
+    """Unlock a file by removing the .lock file, and close the handle."""
+    if self._locked:
+      lock_filename = self._posix_lockfile(self._filename)
+      os.close(self._lock_fd)
+      os.unlink(lock_filename)
+      self._locked = False
+      self._lock_fd = None
+    if self._fh:
+      self._fh.close()
+
+  def _posix_lockfile(self, filename):
+    """The name of the lock file to use for posix locking."""
+    return '%s.lock' % filename
+
+
+try:
+  import fcntl
+
+  class _FcntlOpener(_Opener):
+    """Open, lock, and unlock a file using fcntl.lockf."""
+
+    def open_and_lock(self, timeout, delay):
+      """Open the file and lock it.
+
+      Args:
+        timeout: float, How long to try to lock for.
+        delay: float, How long to wait between retries
+
+      Raises:
+        AlreadyLockedException: if the lock is already acquired.
+        IOError: if the open fails.
+        CredentialsFileSymbolicLinkError if the file is a symbolic link.
+      """
+      if self._locked:
+        raise AlreadyLockedException('File %s is already locked' %
+                                     self._filename)
+      start_time = time.time()
+
+      validate_file(self._filename)
+      try:
+        self._fh = open(self._filename, self._mode)
+      except IOError, e:
+        # If we can't access with _mode, try _fallback_mode and don't lock.
+        if e.errno == errno.EACCES:
+          self._fh = open(self._filename, self._fallback_mode)
+          return
+
+      # We opened in _mode, try to lock the file.
+      while True:
+        try:
+          fcntl.lockf(self._fh.fileno(), fcntl.LOCK_EX)
+          self._locked = True
+          return
+        except IOError, e:
+          # If not retrying, then just pass on the error.
+          if timeout == 0:
+            raise e
+          if e.errno != errno.EACCES:
+            raise e
+          # We could not acquire the lock. Try again.
+          if (time.time() - start_time) >= timeout:
+            logger.warn('Could not lock %s in %s seconds' % (
+                self._filename, timeout))
+            if self._fh:
+              self._fh.close()
+            self._fh = open(self._filename, self._fallback_mode)
+            return
+          time.sleep(delay)
+
+    def unlock_and_close(self):
+      """Close and unlock the file using the fcntl.lockf primitive."""
+      if self._locked:
+        fcntl.lockf(self._fh.fileno(), fcntl.LOCK_UN)
+      self._locked = False
+      if self._fh:
+        self._fh.close()
+except ImportError:
+  _FcntlOpener = None
+
+
+try:
+  import pywintypes
+  import win32con
+  import win32file
+
+  class _Win32Opener(_Opener):
+    """Open, lock, and unlock a file using windows primitives."""
+
+    # Error #33:
+    #  'The process cannot access the file because another process'
+    FILE_IN_USE_ERROR = 33
+
+    # Error #158:
+    #  'The segment is already unlocked.'
+    FILE_ALREADY_UNLOCKED_ERROR = 158
+
+    def open_and_lock(self, timeout, delay):
+      """Open the file and lock it.
+
+      Args:
+        timeout: float, How long to try to lock for.
+        delay: float, How long to wait between retries
+
+      Raises:
+        AlreadyLockedException: if the lock is already acquired.
+        IOError: if the open fails.
+        CredentialsFileSymbolicLinkError if the file is a symbolic link.
+      """
+      if self._locked:
+        raise AlreadyLockedException('File %s is already locked' %
+                                     self._filename)
+      start_time = time.time()
+
+      validate_file(self._filename)
+      try:
+        self._fh = open(self._filename, self._mode)
+      except IOError, e:
+        # If we can't access with _mode, try _fallback_mode and don't lock.
+        if e.errno == errno.EACCES:
+          self._fh = open(self._filename, self._fallback_mode)
+          return
+
+      # We opened in _mode, try to lock the file.
+      while True:
+        try:
+          hfile = win32file._get_osfhandle(self._fh.fileno())
+          win32file.LockFileEx(
+              hfile,
+              (win32con.LOCKFILE_FAIL_IMMEDIATELY|
+               win32con.LOCKFILE_EXCLUSIVE_LOCK), 0, -0x10000,
+              pywintypes.OVERLAPPED())
+          self._locked = True
+          return
+        except pywintypes.error, e:
+          if timeout == 0:
+            raise e
+
+          # If the error is not that the file is already in use, raise.
+          if e[0] != _Win32Opener.FILE_IN_USE_ERROR:
+            raise
+
+          # We could not acquire the lock. Try again.
+          if (time.time() - start_time) >= timeout:
+            logger.warn('Could not lock %s in %s seconds' % (
+                self._filename, timeout))
+            if self._fh:
+              self._fh.close()
+            self._fh = open(self._filename, self._fallback_mode)
+            return
+          time.sleep(delay)
+
+    def unlock_and_close(self):
+      """Close and unlock the file using the win32 primitive."""
+      if self._locked:
+        try:
+          hfile = win32file._get_osfhandle(self._fh.fileno())
+          win32file.UnlockFileEx(hfile, 0, -0x10000, pywintypes.OVERLAPPED())
+        except pywintypes.error, e:
+          if e[0] != _Win32Opener.FILE_ALREADY_UNLOCKED_ERROR:
+            raise
+      self._locked = False
+      if self._fh:
+        self._fh.close()
+except ImportError:
+  _Win32Opener = None
+
+
+class LockedFile(object):
+  """Represent a file that has exclusive access."""
+
+  @util.positional(4)
+  def __init__(self, filename, mode, fallback_mode, use_native_locking=True):
+    """Construct a LockedFile.
+
+    Args:
+      filename: string, The path of the file to open.
+      mode: string, The mode to try to open the file with.
+      fallback_mode: string, The mode to use if locking fails.
+      use_native_locking: bool, Whether or not fcntl/win32 locking is used.
+    """
+    opener = None
+    if not opener and use_native_locking:
+      if _Win32Opener:
+        opener = _Win32Opener(filename, mode, fallback_mode)
+      if _FcntlOpener:
+        opener = _FcntlOpener(filename, mode, fallback_mode)
+
+    if not opener:
+      opener = _PosixOpener(filename, mode, fallback_mode)
+
+    self._opener = opener
+
+  def filename(self):
+    """Return the filename we were constructed with."""
+    return self._opener._filename
+
+  def file_handle(self):
+    """Return the file_handle to the opened file."""
+    return self._opener.file_handle()
+
+  def is_locked(self):
+    """Return whether we successfully locked the file."""
+    return self._opener.is_locked()
+
+  def open_and_lock(self, timeout=0, delay=0.05):
+    """Open the file, trying to lock it.
+
+    Args:
+      timeout: float, The number of seconds to try to acquire the lock.
+      delay: float, The number of seconds to wait between retry attempts.
+
+    Raises:
+      AlreadyLockedException: if the lock is already acquired.
+      IOError: if the open fails.
+    """
+    self._opener.open_and_lock(timeout, delay)
+
+  def unlock_and_close(self):
+    """Unlock and close a file."""
+    self._opener.unlock_and_close()
diff --git a/oauth2client/multistore_file.py b/oauth2client/multistore_file.py
new file mode 100644
index 0000000..e1b39f7
--- /dev/null
+++ b/oauth2client/multistore_file.py
@@ -0,0 +1,409 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+"""Multi-credential file store with lock support.
+
+This module implements a JSON credential store where multiple
+credentials can be stored in one file. That file supports locking
+both in a single process and across processes.
+
+The credentials themselves are keyed off of:
+* client_id
+* user_agent
+* scope
+
+The format of the stored data is like so:
+{
+  'file_version': 1,
+  'data': [
+    {
+      'key': {
+        'clientId': '<client id>',
+        'userAgent': '<user agent>',
+        'scope': '<scope>'
+      },
+      'credential': {
+        # JSON serialized Credentials.
+      }
+    }
+  ]
+}
+"""
+
+__author__ = 'jbeda@google.com (Joe Beda)'
+
+import base64
+import errno
+import logging
+import os
+import threading
+
+from anyjson import simplejson
+from oauth2client.client import Storage as BaseStorage
+from oauth2client.client import Credentials
+from oauth2client import util
+from locked_file import LockedFile
+
+logger = logging.getLogger(__name__)
+
+# A dict from 'filename'->_MultiStore instances
+_multistores = {}
+_multistores_lock = threading.Lock()
+
+
+class Error(Exception):
+  """Base error for this module."""
+  pass
+
+
+class NewerCredentialStoreError(Error):
+  """The credential store is a newer version that supported."""
+  pass
+
+
+@util.positional(4)
+def get_credential_storage(filename, client_id, user_agent, scope,
+                           warn_on_readonly=True):
+  """Get a Storage instance for a credential.
+
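+  Usage (a minimal sketch; all argument values are placeholders):
+
+    storage = get_credential_storage(
+        'credentials.multistore', 'my-client-id', 'my-app/1.0',
+        ['https://www.googleapis.com/auth/devstorage.read_only'])
+    credentials = storage.get()
+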
+  Args:
+    filename: The JSON file storing a set of credentials
+    client_id: The client_id for the credential
+    user_agent: The user agent for the credential
+    scope: string or iterable of strings, Scope(s) being requested
+    warn_on_readonly: if True, log a warning if the store is readonly
+
+  Returns:
+    An object derived from client.Storage for getting/setting the
+    credential.
+  """
+  # Recreate the legacy key with these specific parameters
+  key = {'clientId': client_id, 'userAgent': user_agent,
+         'scope': util.scopes_to_string(scope)}
+  return get_credential_storage_custom_key(
+      filename, key, warn_on_readonly=warn_on_readonly)
+
+
+@util.positional(2)
+def get_credential_storage_custom_string_key(
+    filename, key_string, warn_on_readonly=True):
+  """Get a Storage instance for a credential using a single string as a key.
+
+  Allows you to provide a string as a custom key that will be used for
+  credential storage and retrieval.
+
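+  Usage (a minimal sketch; the filename and key string are placeholders):
+
+    storage = get_credential_storage_custom_string_key(
+        'credentials.multistore', 'user-1@example.com')
+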
+  Args:
+    filename: The JSON file storing a set of credentials
+    key_string: A string to use as the key for storing this credential.
+    warn_on_readonly: if True, log a warning if the store is readonly
+
+  Returns:
+    An object derived from client.Storage for getting/setting the
+    credential.
+  """
+  # Create a key dictionary that can be used
+  key_dict = {'key': key_string}
+  return get_credential_storage_custom_key(
+      filename, key_dict, warn_on_readonly=warn_on_readonly)
+
+
+@util.positional(2)
+def get_credential_storage_custom_key(
+    filename, key_dict, warn_on_readonly=True):
+  """Get a Storage instance for a credential using a dictionary as a key.
+
+  Allows you to provide a dictionary as a custom key that will be used for
+  credential storage and retrieval.
+
+  Args:
+    filename: The JSON file storing a set of credentials
+    key_dict: A dictionary to use as the key for storing this credential. There
+      is no ordering of the keys in the dictionary. Logically equivalent
+      dictionaries will produce equivalent storage keys.
+    warn_on_readonly: if True, log a warning if the store is readonly
+
+  Returns:
+    An object derived from client.Storage for getting/setting the
+    credential.
+  """
+  filename = os.path.expanduser(filename)
+  _multistores_lock.acquire()
+  try:
+    multistore = _multistores.setdefault(
+        filename, _MultiStore(filename, warn_on_readonly=warn_on_readonly))
+  finally:
+    _multistores_lock.release()
+  key = util.dict_to_tuple_key(key_dict)
+  return multistore._get_storage(key)
+
+
+class _MultiStore(object):
+  """A file backed store for multiple credentials."""
+
+  @util.positional(2)
+  def __init__(self, filename, warn_on_readonly=True):
+    """Initialize the class.
+
+    This will create the file if necessary.
+    """
+    self._file = LockedFile(filename, 'r+b', 'rb')
+    self._thread_lock = threading.Lock()
+    self._read_only = False
+    self._warn_on_readonly = warn_on_readonly
+
+    self._create_file_if_needed()
+
+    # Cache of deserialized store. This is only valid after the
+    # _MultiStore is locked or _refresh_data_cache is called. This is
+    # of the form of:
+    #
+    # ((key, value), (key, value)...) -> OAuth2Credential
+    #
+    # If this is None, then the store hasn't been read yet.
+    self._data = None
+
+  class _Storage(BaseStorage):
+    """A Storage object that knows how to read/write a single credential."""
+
+    def __init__(self, multistore, key):
+      self._multistore = multistore
+      self._key = key
+
+    def acquire_lock(self):
+      """Acquires any lock necessary to access this Storage.
+
+      This lock is not reentrant.
+      """
+      self._multistore._lock()
+
+    def release_lock(self):
+      """Release the Storage lock.
+
+      Trying to release a lock that isn't held will result in a
+      RuntimeError.
+      """
+      self._multistore._unlock()
+
+    def locked_get(self):
+      """Retrieve credential.
+
+      The Storage lock must be held when this is called.
+
+      Returns:
+        oauth2client.client.Credentials
+      """
+      credential = self._multistore._get_credential(self._key)
+      if credential:
+        credential.set_store(self)
+      return credential
+
+    def locked_put(self, credentials):
+      """Write a credential.
+
+      The Storage lock must be held when this is called.
+
+      Args:
+        credentials: Credentials, the credentials to store.
+      """
+      self._multistore._update_credential(self._key, credentials)
+
+    def locked_delete(self):
+      """Delete a credential.
+
+      The Storage lock must be held when this is called.
+      """
+      self._multistore._delete_credential(self._key)
+
+  def _create_file_if_needed(self):
+    """Create an empty file if necessary.
+
+    This method will not initialize the file. Instead it implements a
+    simple version of "touch" to ensure the file has been created.
+    """
+    if not os.path.exists(self._file.filename()):
+      old_umask = os.umask(0177)
+      try:
+        open(self._file.filename(), 'a+b').close()
+      finally:
+        os.umask(old_umask)
+
+  def _lock(self):
+    """Lock the entire multistore."""
+    self._thread_lock.acquire()
+    self._file.open_and_lock()
+    if not self._file.is_locked():
+      self._read_only = True
+      if self._warn_on_readonly:
+        logger.warn('The credentials file (%s) is not writable. Opening in '
+                    'read-only mode. Any refreshed credentials will only be '
+                    'valid for this run.' % self._file.filename())
+    if os.path.getsize(self._file.filename()) == 0:
+      logger.debug('Initializing empty multistore file')
+      # The multistore is empty so write out an empty file.
+      self._data = {}
+      self._write()
+    elif not self._read_only or self._data is None:
+      # Only refresh the data if we are read/write or we haven't
+      # cached the data yet. If we are readonly, we assume it isn't
+      # changing out from under us and that we only have to read it
+      # once. This prevents us from whacking any new access keys that
+      # we have cached in memory but were unable to write out.
+      self._refresh_data_cache()
+
+  def _unlock(self):
+    """Release the lock on the multistore."""
+    self._file.unlock_and_close()
+    self._thread_lock.release()
+
+  def _locked_json_read(self):
+    """Get the raw content of the multistore file.
+
+    The multistore must be locked when this is called.
+
+    Returns:
+      The contents of the multistore decoded as JSON.
+    """
+    assert self._thread_lock.locked()
+    self._file.file_handle().seek(0)
+    return simplejson.load(self._file.file_handle())
+
+  def _locked_json_write(self, data):
+    """Write a JSON serializable data structure to the multistore.
+
+    The multistore must be locked when this is called.
+
+    Args:
+      data: The data to be serialized and written.
+    """
+    assert self._thread_lock.locked()
+    if self._read_only:
+      return
+    self._file.file_handle().seek(0)
+    simplejson.dump(data, self._file.file_handle(), sort_keys=True, indent=2)
+    self._file.file_handle().truncate()
+
+  def _refresh_data_cache(self):
+    """Refresh the contents of the multistore.
+
+    The multistore must be locked when this is called.
+
+    Raises:
+      NewerCredentialStoreError: Raised when a newer client has written the
+        store.
+    """
+    self._data = {}
+    try:
+      raw_data = self._locked_json_read()
+    except Exception:
+      logger.warn('Credential data store could not be loaded. '
+                  'Will ignore and overwrite.')
+      return
+
+    version = 0
+    try:
+      version = raw_data['file_version']
+    except Exception:
+      logger.warn('Missing version for credential data store. It may be '
+                  'corrupt or an old version. Overwriting.')
+    if version > 1:
+      raise NewerCredentialStoreError(
+          'Credential file has file_version of %d. '
+          'Only file_version of 1 is supported.' % version)
+
+    credentials = []
+    try:
+      credentials = raw_data['data']
+    except (TypeError, KeyError):
+      pass
+
+    for cred_entry in credentials:
+      try:
+        (key, credential) = self._decode_credential_from_json(cred_entry)
+        self._data[key] = credential
+      except:
+        # If something goes wrong loading a credential, just ignore it
+        logger.info('Error decoding credential, skipping', exc_info=True)
+
+  def _decode_credential_from_json(self, cred_entry):
+    """Load a credential from our JSON serialization.
+
+    Args:
+      cred_entry: A dict entry from the data member of our format
+
+    Returns:
+      (key, cred) where the key is the key tuple and the cred is the
+        OAuth2Credential object.
+    """
+    raw_key = cred_entry['key']
+    key = util.dict_to_tuple_key(raw_key)
+    credential = Credentials.new_from_json(
+        simplejson.dumps(cred_entry['credential']))
+    return (key, credential)
+
+  def _write(self):
+    """Write the cached data back out.
+
+    The multistore must be locked.
+    """
+    raw_data = {'file_version': 1}
+    raw_creds = []
+    raw_data['data'] = raw_creds
+    for (cred_key, cred) in self._data.items():
+      raw_key = dict(cred_key)
+      raw_cred = simplejson.loads(cred.to_json())
+      raw_creds.append({'key': raw_key, 'credential': raw_cred})
+    self._locked_json_write(raw_data)
+
+  def _get_credential(self, key):
+    """Get a credential from the multistore.
+
+    The multistore must be locked.
+
+    Args:
+      key: The key used to retrieve the credential
+
+    Returns:
+      The credential specified or None if not present
+    """
+    return self._data.get(key, None)
+
+  def _update_credential(self, key, cred):
+    """Update a credential and write the multistore.
+
+    This must be called when the multistore is locked.
+
+    Args:
+      key: The key used to retrieve the credential
+      cred: The OAuth2Credential to update/set
+    """
+    self._data[key] = cred
+    self._write()
+
+  def _delete_credential(self, key):
+    """Delete a credential and write the multistore.
+
+    This must be called when the multistore is locked.
+
+    Args:
+      key: The key used to retrieve the credential
+    """
+    try:
+      del self._data[key]
+    except KeyError:
+      pass
+    self._write()
+
+  def _get_storage(self, key):
+    """Get a Storage object to get/set a credential.
+
+    This Storage is a 'view' into the multistore.
+
+    Args:
+      key: The key used to retrieve the credential
+
+    Returns:
+      A Storage object that can be used to get/set this cred
+    """
+    return self._Storage(self, key)
diff --git a/oauth2client/tools.py b/oauth2client/tools.py
new file mode 100644
index 0000000..93b0171
--- /dev/null
+++ b/oauth2client/tools.py
@@ -0,0 +1,205 @@
+# Copyright (C) 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Command-line tools for authenticating via OAuth 2.0
+
+Do the OAuth 2.0 Web Server dance for a command line application. Stores the
+generated credentials in a common file that is used by other example apps in
+the same directory.
+"""
+
+__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__all__ = ['run']
+
+
+import BaseHTTPServer
+import gflags
+import socket
+import sys
+import webbrowser
+
+from oauth2client.client import FlowExchangeError
+from oauth2client.client import OOB_CALLBACK_URN
+from oauth2client import util
+
+try:
+  from urlparse import parse_qsl
+except ImportError:
+  from cgi import parse_qsl
+
+
+FLAGS = gflags.FLAGS
+
+gflags.DEFINE_boolean('auth_local_webserver', True,
+                      ('Run a local web server to handle redirects during '
+                       'OAuth authorization.'))
+
+gflags.DEFINE_string('auth_host_name', 'localhost',
+                     ('Host name to use when running a local web server to '
+                      'handle redirects during OAuth authorization.'))
+
+gflags.DEFINE_multi_int('auth_host_port', [8080, 8090],
+                        ('Port to use when running a local web server to '
+                         'handle redirects during OAuth authorization.'))
+
+
+class ClientRedirectServer(BaseHTTPServer.HTTPServer):
+  """A server to handle OAuth 2.0 redirects back to localhost.
+
+  Waits for a single request and parses the query parameters
+  into query_params and then stops serving.
+  """
+  query_params = {}
+
+
+class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  """A handler for OAuth 2.0 redirects back to localhost.
+
+  Waits for a single request and parses the query parameters
+  into the server's query_params and then stops serving.
+  """
+
+  def do_GET(self):
+    """Handle a GET request.
+
+    Parses the query parameters and prints a message
+    if the flow has completed. Note that we can't detect
+    if an error occurred.
+    """
+    self.send_response(200)
+    self.send_header("Content-type", "text/html")
+    self.end_headers()
+    query = self.path.split('?', 1)[-1]
+    query = dict(parse_qsl(query))
+    self.server.query_params = query
+    self.wfile.write("<html><head><title>Authentication Status</title></head>")
+    self.wfile.write("<body><p>The authentication flow has completed.</p>")
+    self.wfile.write("</body></html>")
+
+  def log_message(self, format, *args):
+    """Do not log messages to stdout while running as command line program."""
+    pass
+
+
+@util.positional(2)
+def run(flow, storage, http=None):
+  """Core code for a command-line application.
+
+  The run() function is called from your application and runs through all the
+  steps to obtain credentials. It takes a Flow argument and attempts to open an
+  authorization server page in the user's default web browser. The server asks
+  the user to grant your application access to the user's data. If the user
+  grants access, the run() function returns new credentials. The new credentials
+  are also stored in the Storage argument, which updates the file associated
+  with the Storage object.
+
+  It presumes it is run from a command-line application and supports the
+  following flags:
+
+    --auth_host_name: Host name to use when running a local web server
+      to handle redirects during OAuth authorization.
+      (default: 'localhost')
+
+    --auth_host_port: Port to use when running a local web server to handle
+      redirects during OAuth authorization;
+      repeat this option to specify a list of values
+      (default: '[8080, 8090]')
+      (an integer)
+
+    --[no]auth_local_webserver: Run a local web server to handle redirects
+      during OAuth authorization.
+      (default: 'true')
+
+  Since it uses flags, make sure to initialize the gflags module before calling
+  run().
+
+  Args:
+    flow: Flow, an OAuth 2.0 Flow to step through.
+    storage: Storage, a Storage to store the credential in.
+    http: An instance of httplib2.Http or something that acts like it.
+
+  Returns:
+    Credentials, the obtained credential.
+  """
+  if FLAGS.auth_local_webserver:
+    success = False
+    port_number = 0
+    for port in FLAGS.auth_host_port:
+      port_number = port
+      try:
+        httpd = ClientRedirectServer((FLAGS.auth_host_name, port),
+                                     ClientRedirectHandler)
+      except socket.error:
+        pass
+      else:
+        success = True
+        break
+    FLAGS.auth_local_webserver = success
+    if not success:
+      print 'Failed to start a local webserver listening on either port 8080'
+      print 'or port 8090. Please check your firewall settings and locally'
+      print 'running programs that may be blocking or using those ports.'
+      print
+      print 'Falling back to --noauth_local_webserver and continuing with',
+      print 'authorization.'
+      print
+
+  if FLAGS.auth_local_webserver:
+    oauth_callback = 'http://%s:%s/' % (FLAGS.auth_host_name, port_number)
+  else:
+    oauth_callback = OOB_CALLBACK_URN
+  flow.redirect_uri = oauth_callback
+  authorize_url = flow.step1_get_authorize_url()
+
+  if FLAGS.auth_local_webserver:
+    webbrowser.open(authorize_url, new=1, autoraise=True)
+    print 'Your browser has been opened to visit:'
+    print
+    print '    ' + authorize_url
+    print
+    print 'If your browser is on a different machine, then exit and re-run this'
+    print 'application with the command-line parameter '
+    print
+    print '  --noauth_local_webserver'
+    print
+  else:
+    print 'Go to the following link in your browser:'
+    print
+    print '    ' + authorize_url
+    print
+
+  code = None
+  if FLAGS.auth_local_webserver:
+    httpd.handle_request()
+    if 'error' in httpd.query_params:
+      sys.exit('Authentication request was rejected.')
+    if 'code' in httpd.query_params:
+      code = httpd.query_params['code']
+    else:
+      print 'Failed to find "code" in the query parameters of the redirect.'
+      sys.exit('Try running with --noauth_local_webserver.')
+  else:
+    code = raw_input('Enter verification code: ').strip()
+
+  try:
+    credential = flow.step2_exchange(code, http=http)
+  except FlowExchangeError, e:
+    sys.exit('Authentication has failed: %s' % e)
+
+  storage.put(credential)
+  credential.set_store(storage)
+  print 'Authentication successful.'
+
+  return credential
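+
+
+# Example (illustrative sketch; CLIENT_ID, CLIENT_SECRET and the scope are
+# placeholders): a typical command-line script initializes gflags, builds a
+# Flow, and hands it to run() together with a Storage.
+#
+#   import sys
+#   import gflags
+#   from oauth2client.client import OAuth2WebServerFlow
+#   from oauth2client.file import Storage
+#
+#   gflags.FLAGS(sys.argv)  # run() requires gflags to be initialized first.
+#   flow = OAuth2WebServerFlow(client_id='CLIENT_ID',
+#                              client_secret='CLIENT_SECRET',
+#                              scope='https://www.googleapis.com/auth/drive')
+#   credentials = run(flow, Storage('credentials.dat'))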
diff --git a/oauth2client/util.py b/oauth2client/util.py
new file mode 100644
index 0000000..ee6a100
--- /dev/null
+++ b/oauth2client/util.py
@@ -0,0 +1,192 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Common utility library."""
+
+__author__ = ['rafek@google.com (Rafe Kaplan)',
+              'guido@google.com (Guido van Rossum)',
+]
+__all__ = [
+  'positional',
+]
+
+import gflags
+import inspect
+import logging
+import types
+import urllib
+import urlparse
+
+try:
+  from urlparse import parse_qsl
+except ImportError:
+  from cgi import parse_qsl
+
+logger = logging.getLogger(__name__)
+
+FLAGS = gflags.FLAGS
+
+gflags.DEFINE_enum('positional_parameters_enforcement', 'WARNING',
+    ['EXCEPTION', 'WARNING', 'IGNORE'],
+    'The action when an oauth2client.util.positional declaration is violated.')
+
+
+def positional(max_positional_args):
+  """A decorator to declare that only the first N arguments my be positional.
+
+  This decorator makes it easy to support Python 3 style key-word only
+  parameters. For example, in Python 3 it is possible to write:
+
+    def fn(pos1, *, kwonly1=None, kwonly1=None):
+      ...
+
+  All named parameters after * must be a keyword:
+
+    fn(10, 'kw1', 'kw2')  # Raises exception.
+    fn(10, kwonly1='kw1')  # Ok.
+
+  Example:
+    To define a function like above, do:
+
+      @positional(1)
+      def fn(pos1, kwonly1=None, kwonly2=None):
+        ...
+
+    If no default value is provided to a keyword argument, it becomes a required
+    keyword argument:
+
+      @positional(0)
+      def fn(required_kw):
+        ...
+
+    This must be called with the keyword parameter:
+
+      fn()  # Raises exception.
+      fn(10)  # Raises exception.
+      fn(required_kw=10)  # Ok.
+
+    When defining instance or class methods always remember to account for
+    'self' and 'cls':
+
+      class MyClass(object):
+
+        @positional(2)
+        def my_method(self, pos1, kwonly1=None):
+          ...
+
+        @classmethod
+        @positional(2)
+        def my_class_method(cls, pos1, kwonly1=None):
+          ...
+
+  The positional decorator behavior is controlled by the
+  --positional_parameters_enforcement flag. The flag may be set to 'EXCEPTION',
+  'WARNING' or 'IGNORE' to raise an exception, log a warning, or do nothing,
+  respectively, if a declaration is violated.
+
+  Args:
+    max_positional_args: Maximum number of positional arguments. All
+      parameters after this index must be keyword only.
+
+  Returns:
+    A decorator that prevents using arguments after max_positional_args from
+    being used as positional parameters.
+
+  Raises:
+    TypeError if a keyword-only argument is provided as a positional
+    parameter, but only if the --positional_parameters_enforcement flag is
+    set to 'EXCEPTION'.
+  """
+  def positional_decorator(wrapped):
+    def positional_wrapper(*args, **kwargs):
+      if len(args) > max_positional_args:
+        plural_s = ''
+        if max_positional_args != 1:
+          plural_s = 's'
+        message = '%s() takes at most %d positional argument%s (%d given)' % (
+            wrapped.__name__, max_positional_args, plural_s, len(args))
+        if FLAGS.positional_parameters_enforcement == 'EXCEPTION':
+          raise TypeError(message)
+        elif FLAGS.positional_parameters_enforcement == 'WARNING':
+          logger.warning(message)
+        else: # IGNORE
+          pass
+      return wrapped(*args, **kwargs)
+    return positional_wrapper
+
+  if isinstance(max_positional_args, (int, long)):
+    return positional_decorator
+  else:
+    args, _, _, defaults = inspect.getargspec(max_positional_args)
+    if defaults is None:
+      defaults = ()
+    return positional(len(args) - len(defaults))(max_positional_args)
+
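+# Example (illustrative): under the default 'WARNING' enforcement a violating
+# call still goes through after logging; under 'EXCEPTION' it raises TypeError.
+#
+#   @positional(1)
+#   def fn(pos, kw=None):
+#     return (pos, kw)
+#
+#   fn(1, 2)     # violation: logs a warning, or raises under 'EXCEPTION'
+#   fn(1, kw=2)  # always fine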
+
+def scopes_to_string(scopes):
+  """Converts scope value to a string.
+
+  If scopes is a string then it is simply passed through. If scopes is an
+  iterable then a string is returned that is all the individual scopes
+  concatenated with spaces.
+
+  Args:
+    scopes: string or iterable of strings, the scopes.
+
+  Returns:
+    The scopes formatted as a single string.
+  """
+  if isinstance(scopes, types.StringTypes):
+    return scopes
+  else:
+    return ' '.join(scopes)
+
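+# For example (illustrative): scopes_to_string('a') returns 'a', while
+# scopes_to_string(['a', 'b']) returns 'a b'.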
+
+def dict_to_tuple_key(dictionary):
+  """Converts a dictionary to a tuple that can be used as an immutable key.
+
+  The resulting key is always sorted so that logically equivalent dictionaries
+  always produce an identical tuple for a key.
+
+  Args:
+    dictionary: the dictionary to use as the key.
+
+  Returns:
+    A tuple representing the dictionary in its naturally sorted ordering.
+  """
+  return tuple(sorted(dictionary.items()))
+
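+# For example (illustrative): dict_to_tuple_key({'b': 2, 'a': 1}) and
+# dict_to_tuple_key({'a': 1, 'b': 2}) both return (('a', 1), ('b', 2)), so
+# logically equal dictionaries map to the same key.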
+
+def _add_query_parameter(url, name, value):
+  """Adds a query parameter to a url.
+
+  Replaces the current value if it already exists in the URL.
+
+  Args:
+    url: string, url to add the query parameter to.
+    name: string, query parameter name.
+    value: string, query parameter value.
+
+  Returns:
+    The updated URL; the url is returned unchanged if value is None.
+  """
+  if value is None:
+    return url
+  else:
+    parsed = list(urlparse.urlparse(url))
+    q = dict(parse_qsl(parsed[4]))
+    q[name] = value
+    parsed[4] = urllib.urlencode(q)
+    return urlparse.urlunparse(parsed)
diff --git a/oauth2client/xsrfutil.py b/oauth2client/xsrfutil.py
new file mode 100644
index 0000000..7e1fe5c
--- /dev/null
+++ b/oauth2client/xsrfutil.py
@@ -0,0 +1,113 @@
+#!/usr/bin/python2.5
+#
+# Copyright 2010 the Melange authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper methods for creating & verifying XSRF tokens."""
+
+__authors__ = [
+  '"Doug Coker" <dcoker@google.com>',
+  '"Joe Gregorio" <jcgregorio@google.com>',
+]
+
+
+import base64
+import hmac
+import os  # for urandom
+import time
+
+from oauth2client import util
+
+
+# Delimiter character
+DELIMITER = ':'
+
+# 1 hour in seconds
+DEFAULT_TIMEOUT_SECS = 1*60*60
+
+@util.positional(2)
+def generate_token(key, user_id, action_id="", when=None):
+  """Generates a URL-safe token for the given user, action, time tuple.
+
+  Args:
+    key: secret key to use.
+    user_id: the user ID of the authenticated user.
+    action_id: a string identifier of the action they requested
+      authorization for.
+    when: the time in seconds since the epoch at which the user was
+      authorized for this action. If not set the current time is used.
+
+  Returns:
+    A string XSRF protection token.
+  """
+  when = when or int(time.time())
+  digester = hmac.new(key)
+  digester.update(str(user_id))
+  digester.update(DELIMITER)
+  digester.update(action_id)
+  digester.update(DELIMITER)
+  digester.update(str(when))
+  digest = digester.digest()
+
+  token = base64.urlsafe_b64encode('%s%s%d' % (digest,
+                                               DELIMITER,
+                                               when))
+  return token
+
+
+@util.positional(3)
+def validate_token(key, token, user_id, action_id="", current_time=None):
+  """Validates that the given token authorizes the user for the action.
+
+  Tokens are invalid if the time of issue is too old or if the token
+  does not match what generate_token outputs (i.e. the token was forged).
+
+  Args:
+    key: secret key to use.
+    token: a string of the token generated by generate_token.
+    user_id: the user ID of the authenticated user.
+    action_id: a string identifier of the action they requested
+      authorization for.
+    current_time: the time in seconds since the epoch used to check the
+      token's age; defaults to the current time.
+
+  Returns:
+    A boolean - True if the user is authorized for the action, False
+    otherwise.
+  """
+  if not token:
+    return False
+  try:
+    decoded = base64.urlsafe_b64decode(str(token))
+    token_time = long(decoded.split(DELIMITER)[-1])
+  except (TypeError, ValueError):
+    return False
+  if current_time is None:
+    current_time = time.time()
+  # If the token is too old it's not valid.
+  if current_time - token_time > DEFAULT_TIMEOUT_SECS:
+    return False
+
+  # The given token should match the generated one with the same time.
+  expected_token = generate_token(key, user_id, action_id=action_id,
+                                  when=token_time)
+  if len(token) != len(expected_token):
+    return False
+
+  # Perform constant time comparison to avoid timing attacks
+  different = 0
+  for x, y in zip(token, expected_token):
+    different |= ord(x) ^ ord(y)
+  if different:
+    return False
+
+  return True
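+
+
+# Example (illustrative; hypothetical secret key and user id): a
+# generate/validate round trip with the helpers above.
+#
+#   token = generate_token('my-secret-key', 42, action_id='delete')
+#   assert validate_token('my-secret-key', token, 42, action_id='delete')
+#   assert not validate_token('my-secret-key', token, 42, action_id='create')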
diff --git a/php/sdk/google/appengine/api/app_identity/app_identity_service_pb.php b/php/sdk/google/appengine/api/app_identity/app_identity_service_pb.php
index 772feff..4662d02 100644
--- a/php/sdk/google/appengine/api/app_identity/app_identity_service_pb.php
+++ b/php/sdk/google/appengine/api/app_identity/app_identity_service_pb.php
@@ -958,3 +958,118 @@
     }
   }
 }
+namespace google\appengine {
+  class GetDefaultGcsBucketNameRequest extends \google\net\ProtocolMessage {
+    public function clear() {
+    }
+    public function byteSizePartial() {
+      $res = 0;
+      return $res;
+    }
+    public function outputPartial($out) {
+    }
+    public function tryMerge($d) {
+      while($d->avail() > 0) {
+        $tt = $d->getVarInt32();
+        switch ($tt) {
+          case 0:
+            throw new \google\net\ProtocolBufferDecodeError();
+            break;
+          default:
+            $d->skipData($tt);
+        }
+      };
+    }
+    public function checkInitialized() {
+      return null;
+    }
+    public function mergeFrom($x) {
+      if ($x === $this) { throw new \IllegalArgumentException('Cannot copy message to itself'); }
+    }
+    public function equals($x) {
+      if ($x === $this) { return true; }
+      return true;
+    }
+    public function shortDebugString($prefix = "") {
+      $res = '';
+      return $res;
+    }
+  }
+}
+namespace google\appengine {
+  class GetDefaultGcsBucketNameResponse extends \google\net\ProtocolMessage {
+    public function getDefaultGcsBucketName() {
+      if (!isset($this->default_gcs_bucket_name)) {
+        return '';
+      }
+      return $this->default_gcs_bucket_name;
+    }
+    public function setDefaultGcsBucketName($val) {
+      $this->default_gcs_bucket_name = $val;
+      return $this;
+    }
+    public function clearDefaultGcsBucketName() {
+      unset($this->default_gcs_bucket_name);
+      return $this;
+    }
+    public function hasDefaultGcsBucketName() {
+      return isset($this->default_gcs_bucket_name);
+    }
+    public function clear() {
+      $this->clearDefaultGcsBucketName();
+    }
+    public function byteSizePartial() {
+      $res = 0;
+      if (isset($this->default_gcs_bucket_name)) {
+        $res += 1;
+        $res += $this->lengthString(strlen($this->default_gcs_bucket_name));
+      }
+      return $res;
+    }
+    public function outputPartial($out) {
+      if (isset($this->default_gcs_bucket_name)) {
+        $out->putVarInt32(10);
+        $out->putPrefixedString($this->default_gcs_bucket_name);
+      }
+    }
+    public function tryMerge($d) {
+      while($d->avail() > 0) {
+        $tt = $d->getVarInt32();
+        switch ($tt) {
+          case 10:
+            $length = $d->getVarInt32();
+            $this->setDefaultGcsBucketName(substr($d->buffer(), $d->pos(), $length));
+            $d->skip($length);
+            break;
+          case 0:
+            throw new \google\net\ProtocolBufferDecodeError();
+            break;
+          default:
+            $d->skipData($tt);
+        }
+      };
+    }
+    public function checkInitialized() {
+      return null;
+    }
+    public function mergeFrom($x) {
+      if ($x === $this) { throw new \IllegalArgumentException('Cannot copy message to itself'); }
+      if ($x->hasDefaultGcsBucketName()) {
+        $this->setDefaultGcsBucketName($x->getDefaultGcsBucketName());
+      }
+    }
+    public function equals($x) {
+      if ($x === $this) { return true; }
+      if (isset($this->default_gcs_bucket_name) !== isset($x->default_gcs_bucket_name)) return false;
+      if (isset($this->default_gcs_bucket_name) && $this->default_gcs_bucket_name !== $x->default_gcs_bucket_name) return false;
+      return true;
+    }
+    public function shortDebugString($prefix = "") {
+      $res = '';
+      if (isset($this->default_gcs_bucket_name)) {
+        $res .= $prefix . "default_gcs_bucket_name: " . $this->debugFormatString($this->default_gcs_bucket_name) . "\n";
+      }
+      return $res;
+    }
+  }
+}
diff --git a/php/sdk/google/appengine/api/memcache/memcache_service_pb.php b/php/sdk/google/appengine/api/memcache/memcache_service_pb.php
index a89a935..a2fc68a 100644
--- a/php/sdk/google/appengine/api/memcache/memcache_service_pb.php
+++ b/php/sdk/google/appengine/api/memcache/memcache_service_pb.php
@@ -19,7 +19,6 @@
 
 namespace dummy {
   require_once 'google/appengine/runtime/proto/ProtocolMessage.php';
-  require_once 'google/appengine/base/memcache_sharding_strategy_pb.php';
 }
 namespace google\appengine\MemcacheServiceError {
   class ErrorCode {
@@ -140,7 +139,7 @@
     }
     public function getMemcacheShardingStrategy() {
       if (!isset($this->memcache_sharding_strategy)) {
-        return 0;
+        return '';
       }
       return $this->memcache_sharding_strategy;
     }
@@ -181,7 +180,7 @@
       }
       if (isset($this->memcache_sharding_strategy)) {
         $res += 1;
-        $res += $this->lengthVarInt64($this->memcache_sharding_strategy);
+        $res += $this->lengthString(strlen($this->memcache_sharding_strategy));
       }
       return $res;
     }
@@ -203,8 +202,8 @@
         $out->putPrefixedString($this->memcache_pool_hint);
       }
       if (isset($this->memcache_sharding_strategy)) {
-        $out->putVarInt32(40);
-        $out->putVarInt32($this->memcache_sharding_strategy);
+        $out->putVarInt32(42);
+        $out->putPrefixedString($this->memcache_sharding_strategy);
       }
     }
     public function tryMerge($d) {
@@ -227,8 +226,10 @@
             $this->setMemcachePoolHint(substr($d->buffer(), $d->pos(), $length));
             $d->skip($length);
             break;
-          case 40:
-            $this->setMemcacheShardingStrategy($d->getVarInt32());
+          case 42:
+            $length = $d->getVarInt32();
+            $this->setMemcacheShardingStrategy(substr($d->buffer(), $d->pos(), $length));
+            $d->skip($length);
             break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
@@ -289,7 +290,7 @@
         $res .= $prefix . "memcache_pool_hint: " . $this->debugFormatString($this->memcache_pool_hint) . "\n";
       }
       if (isset($this->memcache_sharding_strategy)) {
-        $res .= $prefix . "memcache_sharding_strategy: " . ($this->memcache_sharding_strategy) . "\n";
+        $res .= $prefix . "memcache_sharding_strategy: " . $this->debugFormatString($this->memcache_sharding_strategy) . "\n";
       }
       return $res;
     }
diff --git a/php/sdk/google/appengine/api/taskqueue/PushQueue.php b/php/sdk/google/appengine/api/taskqueue/PushQueue.php
new file mode 100644
index 0000000..3313ce5
--- /dev/null
+++ b/php/sdk/google/appengine/api/taskqueue/PushQueue.php
@@ -0,0 +1,222 @@
+<?php
+/**
+ * Copyright 2007 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * The PushQueue class, which is part of the Task Queue API.
+ *
+ */
+
+namespace google\appengine\api\taskqueue;
+
+require_once 'google/appengine/api/taskqueue/PushTask.php';
+require_once 'google/appengine/api/taskqueue/taskqueue_service_pb.php';
+require_once 'google/appengine/api/taskqueue/TaskAlreadyExistsException.php';
+require_once 'google/appengine/api/taskqueue/TaskQueueException.php';
+require_once 'google/appengine/api/taskqueue/TransientTaskQueueException.php';
+require_once 'google/appengine/runtime/ApiProxy.php';
+require_once 'google/appengine/runtime/ApplicationError.php';
+
+use \google\appengine\runtime\ApiProxy;
+use \google\appengine\runtime\ApplicationError;
+use \google\appengine\TaskQueueAddRequest;
+use \google\appengine\TaskQueueAddRequest\RequestMethod;
+use \google\appengine\TaskQueueAddResponse;
+use \google\appengine\TaskQueueBulkAddRequest;
+use \google\appengine\TaskQueueBulkAddResponse;
+use \google\appengine\TaskQueueServiceError\ErrorCode;
+
+
+/**
+ * A PushQueue executes PushTasks by sending the task back to the application
+ * in the form of an HTTP request to one of the application's handlers.
+ */
+final class PushQueue {
+  /**
+   * The maximum number of tasks in a single call to addTasks().
+   */
+  const MAX_TASKS_PER_ADD = 100;
+
+  private $name;
+
+  private static $methods = [
+    'POST'   => RequestMethod::POST,
+    'GET'    => RequestMethod::GET,
+    'HEAD'   => RequestMethod::HEAD,
+    'PUT'    => RequestMethod::PUT,
+    'DELETE' => RequestMethod::DELETE
+  ];
+
+  /**
+   * Construct a PushQueue.
+   *
+   * @param string $name The name of the queue.
+   */
+  public function __construct($name = 'default') {
+    if (!is_string($name)) {
+      throw new \InvalidArgumentException(
+          '$name must be a string. Actual type: ' . gettype($name));
+    }
+    # TODO: validate queue name length and regex.
+    $this->name = $name;
+  }
+
+  /**
+   * Return the queue's name.
+   *
+   * @return string The queue's name.
+   */
+  public function getName() {
+    return $this->name;
+  }
+
+  private static function errorCodeToException($error) {
+    switch($error) {
+      case ErrorCode::UNKNOWN_QUEUE:
+        return new TaskQueueException('Unknown queue');
+      case ErrorCode::TRANSIENT_ERROR:
+        return new TransientTaskQueueException();
+      case ErrorCode::INTERNAL_ERROR:
+        return new TaskQueueException('Internal error');
+      case ErrorCode::TASK_TOO_LARGE:
+        return new TaskQueueException('Task too large');
+      case ErrorCode::INVALID_TASK_NAME:
+        return new TaskQueueException('Invalid task name');
+      case ErrorCode::INVALID_QUEUE_NAME:
+      case ErrorCode::TOMBSTONED_QUEUE:
+        return new TaskQueueException('Invalid queue name');
+      case ErrorCode::INVALID_URL:
+        return new TaskQueueException('Invalid URL');
+      case ErrorCode::PERMISSION_DENIED:
+        return new TaskQueueException('Permission Denied');
+
+      // Both TASK_ALREADY_EXISTS and TOMBSTONED_TASK are translated into the
+      // same exception. This is in keeping with the Java API but different to
+      // the Python API. Knowing that the task is tombstoned isn't particularly
+      // interesting: the main point is that it has already been added.
+      case ErrorCode::TASK_ALREADY_EXISTS:
+      case ErrorCode::TOMBSTONED_TASK:
+        return new TaskAlreadyExistsException();
+      case ErrorCode::INVALID_ETA:
+        return new TaskQueueException('Invalid delay_seconds');
+      case ErrorCode::INVALID_REQUEST:
+        return new TaskQueueException('Invalid request');
+      case ErrorCode::DUPLICATE_TASK_NAME:
+        return new TaskQueueException(
+            'Duplicate task names in addTasks request.');
+      case ErrorCode::TOO_MANY_TASKS:
+        return new TaskQueueException('Too many tasks in request.');
+      case ErrorCode::INVALID_QUEUE_MODE:
+        return new TaskQueueException('Cannot add a PushTask to a pull queue.');
+      default:
+        return new TaskQueueException('Error Code: ' . $error);
+    }
+  }
+
+  /**
+   * Add tasks to the queue.
+   *
+   * @param PushTask[] $tasks The tasks to be added to the queue.
+   *
+   * @return string[] The name of each task added, with the same ordering
+   * as $tasks.
+   *
+   * @throws TaskAlreadyExistsException if a task of the same name already
+   * exists in the queue.
+   * If this exception is raised, the caller is guaranteed that every task
+   * was successfully added either by this call or a previous one; if any
+   * task had failed for a different reason, that other exception would have
+   * been thrown instead.
+   * @throws TaskQueueException if there was a problem using the service.
+   */
+  public function addTasks($tasks) {
+    if (!is_array($tasks)) {
+      throw new \InvalidArgumentException(
+          '$tasks must be an array. Actual type: ' . gettype($tasks));
+    }
+    if (empty($tasks)) {
+      return [];
+    }
+    if (count($tasks) > self::MAX_TASKS_PER_ADD) {
+      throw new \InvalidArgumentException(
+          '$tasks must contain at most ' . self::MAX_TASKS_PER_ADD .
+          ' tasks. Actual size: ' . count($tasks));
+    }
+    $req = new TaskQueueBulkAddRequest();
+    $resp = new TaskQueueBulkAddResponse();
+
+    $names = [];
+    $current_time = microtime(true);
+    foreach ($tasks as $task) {
+      if (!($task instanceof PushTask)) {
+        throw new \InvalidArgumentException(
+            'All values in $tasks must be instances of PushTask. ' .
+            'Actual type: ' . gettype($task));
+      }
+      $names[] = $task->getName();
+      $add = $req->addAddRequest();
+      $add->setQueueName($this->name);
+      $add->setTaskName($task->getName());
+      $add->setEtaUsec(($current_time + $task->getDelaySeconds()) * 1e6);
+      $add->setMethod(self::$methods[$task->getMethod()]);
+      $add->setUrl($task->getUrl());
+      foreach ($task->getHeaders() as $header) {
+        $pair = explode(':', $header, 2);
+        $header_pb = $add->addHeader();
+        $header_pb->setKey(trim($pair[0]));
+        $header_pb->setValue(trim($pair[1]));
+      }
+      // TODO: Replace getQueryData() with getBody() and simplify the following
+      // block.
+      if ($task->getMethod() == 'POST' || $task->getMethod() == 'PUT') {
+        if ($task->getQueryData()) {
+          $add->setBody(http_build_query($task->getQueryData()));
+        }
+      }
+      if ($add->byteSizePartial() > PushTask::MAX_TASK_SIZE_BYTES) {
+        throw new TaskQueueException('Task greater than maximum size of ' .
+            PushTask::MAX_TASK_SIZE_BYTES . '. size: ' .
+            $add->byteSizePartial());
+      }
+    }
+
+    try {
+      ApiProxy::makeSyncCall('taskqueue', 'BulkAdd', $req, $resp);
+    } catch (ApplicationError $e) {
+      throw self::errorCodeToException($e->getApplicationError());
+    }
+
+    // Update $names with any generated task names. Also, check if there are any
+    // error responses.
+    $results = $resp->getTaskResultList();
+    $exception = null;
+    foreach ($results as $index => $task_result) {
+      if ($task_result->hasChosenTaskName()) {
+        $names[$index] = $task_result->getChosenTaskName();
+      }
+      if ($task_result->getResult() != ErrorCode::OK) {
+        $exception = self::errorCodeToException($task_result->getResult());
+        // Other exceptions take precedence over TaskAlreadyExistsException.
+        if (!($exception instanceof TaskAlreadyExistsException)) {
+          throw $exception;
+        }
+      }
+    }
+    if (isset($exception)) {
+      throw $exception;
+    }
+    return $names;
+  }
+}
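+
+// Example (illustrative; the queue name and URLs are placeholders):
+// enqueueing two tasks onto a named push queue with a single BulkAdd call.
+//
+//   $queue = new PushQueue('mail-queue');
+//   $names = $queue->addTasks([
+//       new PushTask('/send', ['to' => 'someone@example.com']),
+//       new PushTask('/send', ['to' => 'other@example.com']),
+//   ]);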
diff --git a/php/sdk/google/appengine/api/taskqueue/PushQueueTest.php b/php/sdk/google/appengine/api/taskqueue/PushQueueTest.php
new file mode 100644
index 0000000..184b63f
--- /dev/null
+++ b/php/sdk/google/appengine/api/taskqueue/PushQueueTest.php
@@ -0,0 +1,292 @@
+<?php
+/**
+ * Copyright 2007 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Unit tests for the PushQueue class.
+ *
+ */
+
+namespace google\appengine\api\taskqueue;
+
+require_once 'google/appengine/api/taskqueue/PushQueue.php';
+require_once 'google/appengine/api/taskqueue/PushTask.php';
+require_once 'google/appengine/testing/ApiProxyTestBase.php';
+
+use google\appengine\api\taskqueue\PushTask;
+use google\appengine\testing\ApiProxyTestBase;
+use \google\appengine\TaskQueueAddRequest\RequestMethod;
+use \google\appengine\TaskQueueBulkAddRequest;
+use \google\appengine\TaskQueueBulkAddResponse;
+use \google\appengine\TaskQueueServiceError\ErrorCode;
+
+$mockTime = 12345.6;
+
+// This mocks out PHP's microtime() function.
+function microtime($get_as_float=false) {
+  if (!$get_as_float) {
+    die('microtime called with get_as_float=false');
+  }
+  global $mockTime;
+  return $mockTime;
+}
+
+class PushQueueTest extends ApiProxyTestBase {
+
+  public function setUp() {
+    parent::setUp();
+    $this->_SERVER = $_SERVER;
+  }
+
+  public function tearDown() {
+    $_SERVER = $this->_SERVER;
+    parent::tearDown();
+  }
+
+  private static function buildBulkAddRequest($queue_name = 'default') {
+    $req = new TaskQueueBulkAddRequest();
+    $task = $req->addAddRequest();
+    $task->setQueueName($queue_name);
+    $task->setTaskName('');
+    $task->setUrl('/someUrl');
+    global $mockTime;
+    $task->setEtaUsec($mockTime * 1e6);
+    $task->setMethod(RequestMethod::POST);
+    return $req;
+  }
+
+  private static function buildBulkAddRequestWithTwoTasks(
+      $queue_name = 'default') {
+    $req = self::buildBulkAddRequest($queue_name);
+
+    $task = $req->addAddRequest();
+    $task->setQueueName($queue_name);
+    $task->setTaskName('');
+    $task->setUrl('/someOtherUrl');
+    global $mockTime;
+    $task->setEtaUsec($mockTime * 1e6);
+    $task->setMethod(RequestMethod::POST);
+
+    return $req;
+  }
+
+  public function testConstructorNameWrongType() {
+    $this->setExpectedException('\InvalidArgumentException',
+        '$name must be a string. Actual type: integer');
+    $queue = new PushQueue(54321);
+  }
+
+  public function testGetName() {
+    $queue = new PushQueue();
+    $this->assertEquals('default', $queue->getName());
+    $queue = new PushQueue('fast-queue');
+    $this->assertEquals('fast-queue', $queue->getName());
+  }
+
+  public function testAddTaskTooBig() {
+    $this->setExpectedException(
+        '\google\appengine\api\taskqueue\TaskQueueException',
+        'Task greater than maximum size of ' . PushTask::MAX_TASK_SIZE_BYTES);
+    // Although 102400 is the max size, it's for the serialized proto which
+    // includes the URL etc.
+    $task = new PushTask('/someUrl', ['field' => str_repeat('a', 102395)]);
+    (new PushQueue())->addTasks([$task]);
+  }
+
+  public function testPushQueueAddTasksWrongType() {
+    $this->setExpectedException('\InvalidArgumentException',
+        '$tasks must be an array. Actual type: string');
+    $queue = new PushQueue();
+    $task_names = $queue->addTasks('not an array');
+  }
+
+  public function testPushQueueAddTasksWrongValueType() {
+    $this->setExpectedException('\InvalidArgumentException',
+        'All values in $tasks must be instances of PushTask. ' .
+        'Actual type: double');
+    $queue = new PushQueue();
+    $task_names = $queue->addTasks([1.0]);
+  }
+
+  public function testPushQueueAddTasksTooMany() {
+    $this->setExpectedException('\InvalidArgumentException',
+        '$tasks must contain at most 100 tasks. Actual size: 101');
+    $tasks = [];
+    for ($i = 0; $i < 101; $i++) {
+      $tasks[] = new PushTask('/a-url');
+    }
+    $queue = new PushQueue();
+    $queue->addTasks($tasks);
+  }
+
+  public function testPushQueueAddTasksEmptyArray() {
+    $queue = new PushQueue();
+    $task_names = $queue->addTasks([]);
+    $this->assertEquals([], $task_names);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testPushQueueSimplestAddTasks() {
+    $req = self::buildBulkAddRequest();
+
+    $resp = new TaskQueueBulkAddResponse();
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::OK);
+    $task_result->setChosenTaskName('fred');
+
+    $this->apiProxyMock->expectCall('taskqueue', 'BulkAdd', $req, $resp);
+
+    $task = new PushTask('/someUrl');
+    $queue = new PushQueue();
+    $task_names = $queue->addTasks([$task]);
+    $this->assertEquals(['fred'], $task_names);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testPushQueueAddTwoTasks() {
+    $req = self::buildBulkAddRequestWithTwoTasks();
+
+    $resp = new TaskQueueBulkAddResponse();
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::OK);
+    $task_result->setChosenTaskName('fred');
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::OK);
+    $task_result->setChosenTaskName('bob');
+
+    $this->apiProxyMock->expectCall('taskqueue', 'BulkAdd', $req, $resp);
+
+    $task1 = new PushTask('/someUrl');
+    $task2 = new PushTask('/someOtherUrl');
+    $queue = new PushQueue();
+    $task_names = $queue->addTasks([$task1, $task2]);
+    $this->assertEquals(['fred', 'bob'], $task_names);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testPushQueueAddTwoTasksNonDefaultQueue() {
+    $req = self::buildBulkAddRequestWithTwoTasks('superQ');
+
+    $resp = new TaskQueueBulkAddResponse();
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::OK);
+    $task_result->setChosenTaskName('fred');
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::OK);
+    $task_result->setChosenTaskName('bob');
+
+    $this->apiProxyMock->expectCall('taskqueue', 'BulkAdd', $req, $resp);
+
+    $task1 = new PushTask('/someUrl');
+    $task2 = new PushTask('/someOtherUrl');
+    $queue = new PushQueue('superQ');
+    $task_names = $queue->addTasks([$task1, $task2]);
+    $this->assertEquals(['fred', 'bob'], $task_names);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testPushQueueTaskAlreadyExistsError() {
+    $req = self::buildBulkAddRequestWithTwoTasks();
+
+    $resp = new TaskQueueBulkAddResponse();
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::OK);
+    $task_result->setChosenTaskName('fred');
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::TOMBSTONED_TASK);
+    $task_result->setChosenTaskName('bob');
+
+    $this->setExpectedException(
+        '\google\appengine\api\taskqueue\TaskAlreadyExistsException');
+    $this->apiProxyMock->expectCall('taskqueue', 'BulkAdd', $req, $resp);
+
+    $task1 = new PushTask('/someUrl');
+    $task2 = new PushTask('/someOtherUrl');
+    $queue = new PushQueue();
+    $queue->addTasks([$task1, $task2]);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testPushQueueUnknownQueueError() {
+    $req = self::buildBulkAddRequestWithTwoTasks();
+
+    $resp = new TaskQueueBulkAddResponse();
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::OK);
+    $task_result->setChosenTaskName('fred');
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::UNKNOWN_QUEUE);
+    $task_result->setChosenTaskName('bob');
+
+    $this->setExpectedException(
+        '\google\appengine\api\taskqueue\TaskQueueException',
+        'Unknown queue');
+    $this->apiProxyMock->expectCall('taskqueue', 'BulkAdd', $req, $resp);
+
+    $task1 = new PushTask('/someUrl');
+    $task2 = new PushTask('/someOtherUrl');
+    $queue = new PushQueue();
+    $queue->addTasks([$task1, $task2]);
+    $this->apiProxyMock->verify();
+  }
+
+  // UNKNOWN_QUEUE should take precedence over TOMBSTONED_TASK.
+  public function testPushQueueTwoErrors() {
+    $req = self::buildBulkAddRequestWithTwoTasks();
+
+    $resp = new TaskQueueBulkAddResponse();
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::TOMBSTONED_TASK);
+    $task_result->setChosenTaskName('fred');
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::UNKNOWN_QUEUE);
+    $task_result->setChosenTaskName('bob');
+
+    $this->setExpectedException(
+        '\google\appengine\api\taskqueue\TaskQueueException',
+        'Unknown queue');
+    $this->apiProxyMock->expectCall('taskqueue', 'BulkAdd', $req, $resp);
+
+    $task1 = new PushTask('/someUrl');
+    $task2 = new PushTask('/someOtherUrl');
+    $queue = new PushQueue();
+    $queue->addTasks([$task1, $task2]);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testPushQueueTooManyTasksError() {
+    $req = self::buildBulkAddRequestWithTwoTasks();
+
+    $resp = new TaskQueueBulkAddResponse();
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::OK);
+    $task_result->setChosenTaskName('fred');
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::TOO_MANY_TASKS);
+    $task_result->setChosenTaskName('bob');
+
+    $this->setExpectedException(
+        '\google\appengine\api\taskqueue\TaskQueueException',
+        'Too many tasks in request.');
+    $this->apiProxyMock->expectCall('taskqueue', 'BulkAdd', $req, $resp);
+
+    $task1 = new PushTask('/someUrl');
+    $task2 = new PushTask('/someOtherUrl');
+    $queue = new PushQueue();
+    $queue->addTasks([$task1, $task2]);
+    $this->apiProxyMock->verify();
+  }
+
+}
diff --git a/php/sdk/google/appengine/api/taskqueue/PushTask.php b/php/sdk/google/appengine/api/taskqueue/PushTask.php
index 5ef555e..4aca0e9 100644
--- a/php/sdk/google/appengine/api/taskqueue/PushTask.php
+++ b/php/sdk/google/appengine/api/taskqueue/PushTask.php
@@ -20,10 +20,9 @@
  */
 
 # Overview of TODOs(petermck) for building out the full Task Queue API:
-# - Support additional options for PushTasks, including headers, target,
-#   payload, and retry options.
-# - Add a PushQueue class which will support adding multiple tasks at once, plus
-#   various other queue level functionality such as FetchQueueStats.
+# - Support additional options for PushTasks, including retry options and maybe
+#   raw payloads.
+# - Support various queue level functionality such as FetchQueueStats.
 # - Add PullTask class.  At that point, perhaps refactor to use a Task
 #   baseclass to share code with PushTask.
 # - Add a PullQueue class, including pull specific queue methods such as
@@ -33,21 +32,10 @@
 
 namespace google\appengine\api\taskqueue;
 
+require_once 'google/appengine/api/taskqueue/PushQueue.php';
 require_once 'google/appengine/api/taskqueue/taskqueue_service_pb.php';
-require_once 'google/appengine/api/taskqueue/TaskAlreadyExistsException.php';
-require_once 'google/appengine/api/taskqueue/TaskQueueException.php';
-require_once 'google/appengine/api/taskqueue/TransientTaskQueueException.php';
-require_once 'google/appengine/runtime/ApiProxy.php';
-require_once 'google/appengine/runtime/ApplicationError.php';
 
-use \google\appengine\runtime\ApiProxy;
-use \google\appengine\runtime\ApplicationError;
-use \google\appengine\TaskQueueAddRequest;
 use \google\appengine\TaskQueueAddRequest\RequestMethod;
-use \google\appengine\TaskQueueAddResponse;
-use \google\appengine\TaskQueueBulkAddRequest;
-use \google\appengine\TaskQueueBulkAddResponse;
-use \google\appengine\TaskQueueServiceError\ErrorCode;
 
 
 /**
@@ -79,14 +67,17 @@
     'delay_seconds' => 0.0,
     'method'        => 'POST',
     'name'          => '',
+    'header'        => '',
   ];
 
-  private $url_path;
+  private $url;
 
   private $query_data;
 
   private $options;
 
+  private $headers = [];
+
   /**
    * Construct a PushTask.
    *
@@ -108,6 +99,8 @@
    *   will generate a unique task name.</li>
    *   <li>'delay_seconds': float The minimum time to wait before executing the
    *   task. Default: zero.</li>
+   *   <li>'header': string Additional headers to be sent when the task
+   *   executes.</li>
    * </ul>
    */
   public function __construct($url_path, $query_data=[], $options=[]) {
@@ -139,8 +132,6 @@
                                           implode(',', $extra_options));
     }
 
-    $this->url_path = $url_path;
-    $this->query_data = $query_data;
     $this->options = array_merge(self::$default_options, $options);
 
     if (!array_key_exists($this->options['method'], self::$methods)) {
@@ -174,15 +165,58 @@
           'delay_seconds must be between 0 and ' . self::MAX_DELAY_SECONDS .
           ' (30 days). delay_seconds: ' . $delay);
     }
+
+    $this->query_data = $query_data;
+    $this->url = $url_path;
+    if ($query_data) {
+      if (in_array($this->options['method'], ['GET', 'HEAD', 'DELETE'])) {
+        $this->url = $url_path . '?' . http_build_query($query_data);
+      } else { // PUT or POST
+        $this->headers[] = 'content-type: application/x-www-form-urlencoded';
+      }
+    }
+    if (strlen($this->url) > self::MAX_URL_LENGTH) {
+      throw new \InvalidArgumentException(
+          'URL length greater than maximum of ' .
+          PushTask::MAX_URL_LENGTH . '. URL: ' . $this->url);
+    }
+
+    // Handle user specified headers.
+    $header = $this->options['header'];
+    if (!is_string($header)) {
+      throw new \InvalidArgumentException('header must be a string. ' .
+          'Actual type: ' . gettype($header));
+    }
+
+    $has_content_type = !empty($this->headers);
+    $header_array = explode("\r\n", $header);
+    foreach ($header_array as $h) {
+      $h = trim($h);
+      if (empty($h)) {
+        continue;
+      }
+      if (strpos($h, ':') === false) {
+        throw new \InvalidArgumentException(
+            'Each header must contain a colon. Header: ' . $h);
+      }
+      if ($has_content_type &&
+          strncasecmp('content-type', $h, strlen('content-type')) == 0) {
+        throw new \InvalidArgumentException('Content-type header may not ' .
+            'be specified as it is set by the task.');
+      }
+      $this->headers[] = $h;
+    }
   }
 
   /**
-   * Return the task's URL path.
+   * Return the task's URL.  This will be the task's URL path, plus any query
+   * parameters if the task's method is GET, HEAD, or DELETE.
    *
-   * @return string The task's URL path.
+   * @return string The task's URL.
    */
-  public function getUrlPath() {
-    return $this->url_path;
+  public function getUrl() {
+    return $this->url;
   }
 
   /**
@@ -224,6 +258,19 @@
   }
 
   /**
+   * Return the task's headers.
+   *
+   * @return string[] The headers that will be sent when the task is
+   * executed. This list is not exhaustive as the backend may add more
+   * headers at execution time.
+   * The array is numerically indexed and of the same format as that returned
+   * by the standard headers_list() function.
+   */
+  public function getHeaders() {
+    return $this->headers;
+  }
+
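+  // Example (illustrative): multiple custom headers are passed via the
+  // 'header' option, separated by "\r\n":
+  //
+  //   $task = new PushTask('/worker', ['key' => 'value'],
+  //       ['header' => "x-custom-1: abc\r\nx-custom-2: def"]);
+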
+  /**
    * Adds the task to a queue.
    *
    * @param string $queue The name of the queue to add to. Defaults to
@@ -235,107 +282,8 @@
    * exists in the queue.
    * @throws TaskQueueException if there was a problem using the service.
    */
-  public function add($queue = 'default') {
-    if (!is_string($queue)) {
-      throw new \InvalidArgumentException('query must be a string.');
-    }
-    # TODO: validate queue name length and regex.
-    return self::addTasks([$this], $queue)[0];
-  }
-
-  private static function applicationErrorToException($error) {
-    switch($error->getApplicationError()) {
-      case ErrorCode::UNKNOWN_QUEUE:
-        return new TaskQueueException('Unknown queue');
-      case ErrorCode::TRANSIENT_ERROR:
-        return new TransientTaskQueueException();
-      case ErrorCode::INTERNAL_ERROR:
-        return new TaskQueueException('Internal error');
-      case ErrorCode::TASK_TOO_LARGE:
-        return new TaskQueueException('Task too large');
-      case ErrorCode::INVALID_TASK_NAME:
-        return new TaskQueueException('Invalid task name');
-      case ErrorCode::INVALID_QUEUE_NAME:
-      case ErrorCode::TOMBSTONED_QUEUE:
-        return new TaskQueueException('Invalid queue name');
-      case ErrorCode::INVALID_URL:
-        return new TaskQueueException('Invalid URL');
-      case ErrorCode::PERMISSION_DENIED:
-        return new TaskQueueException('Permission Denied');
-
-      // Both TASK_ALREADY_EXISTS and TOMBSTONED_TASK are translated into the
-      // same exception. This is in keeping with the Java API but different to
-      // the Python API. Knowing that the task is tombstoned isn't particularly
-      // interesting: the main point is that it has already been added.
-      case ErrorCode::TASK_ALREADY_EXISTS:
-      case ErrorCode::TOMBSTONED_TASK:
-        return new TaskAlreadyExistsException();
-      case ErrorCode::INVALID_ETA:
-        return new TaskQueueException('Invalid delay_seconds');
-      case ErrorCode::INVALID_REQUEST:
-        return new TaskQueueException('Invalid request');
-      case ErrorCode::INVALID_QUEUE_MODE:
-        return new TaskQueueException('Cannot add a PushTask to a pull queue.');
-      default:
-        return new TaskQueueException(
-            'Error Code: ' . $error->getApplicationError());
-    }
-  }
-
-  # TODO: Move this function into a PushQueue class when we have one.
-  # Returns an array containing the name of each task added.
-  private static function addTasks($tasks, $queue) {
-    $req = new TaskQueueBulkAddRequest();
-    $resp = new TaskQueueBulkAddResponse();
-
-    $names = [];
-    $current_time = microtime(true);
-    foreach ($tasks as $task) {
-      $names[] = $task->getName();
-      $add = $req->addAddRequest();
-      $add->setQueueName($queue);
-      $add->setTaskName($task->getName());
-      $add->setEtaUsec(($current_time + $task->getDelaySeconds()) * 1e6);
-      $add->setMethod(self::$methods[$task->getMethod()]);
-      if ($task->getMethod() == 'POST' || $task->getMethod() == 'PUT') {
-        $add->setUrl($task->getUrlPath());
-        if ($task->getQueryData()) {
-          $add->setBody(http_build_query($task->getQueryData()));
-          $header = $add->addHeader();
-          $header->setKey('content-type');
-          $header->setValue('application/x-www-form-urlencoded');
-        }
-      } else {
-        $url_path = $task->getUrlPath();
-        if ($task->getQueryData()) {
-          $url_path = $url_path . '?' .
-              http_build_query($task->getQueryData());
-        }
-        $add->setUrl($url_path);
-      }
-      if (strlen($add->getUrl()) > self::MAX_URL_LENGTH) {
-        throw new TaskQueueException('URL length greater than maximum of ' .
-            self::MAX_URL_LENGTH . '. URL: ' . $add->getUrl());
-      }
-      if ($add->byteSizePartial() > self::MAX_TASK_SIZE_BYTES) {
-        throw new TaskQueueException('Task greater than maximum size of ' .
-            self::MAX_TASK_SIZE_BYTES . '. size: ' . $add->byteSizePartial());
-      }
-    }
-
-    try {
-      ApiProxy::makeSyncCall('taskqueue', 'BulkAdd', $req, $resp);
-    } catch (ApplicationError $e) {
-      throw self::applicationErrorToException($e);
-    }
-
-    // Update $names with any generated task names.
-    $results = $resp->getTaskResultList();
-    foreach ($results as $index => $taskResult) {
-      if ($taskResult->hasChosenTaskName()) {
-        $names[$index] = $taskResult->getChosenTaskName();
-      }
-    }
-    return $names;
+  public function add($queue_name = 'default') {
+    $queue = new PushQueue($queue_name);
+    return $queue->addTasks([$this])[0];
   }
 }
diff --git a/php/sdk/google/appengine/api/taskqueue/PushTaskTest.php b/php/sdk/google/appengine/api/taskqueue/PushTaskTest.php
index b13b8cb..d69e8a1 100644
--- a/php/sdk/google/appengine/api/taskqueue/PushTaskTest.php
+++ b/php/sdk/google/appengine/api/taskqueue/PushTaskTest.php
@@ -178,6 +178,36 @@
                          ['delay_seconds' => 30 * 86400]);
   }
 
+  public function testConstructorUrlTooBig() {
+    $url = '/' . str_repeat('b', 2083);
+    $this->setExpectedException(
+        '\InvalidArgumentException',
+        'URL length greater than maximum of ' . PushTask::MAX_URL_LENGTH .
+        '. URL: ' . $url);
+    $t = new PushTask($url);
+  }
+
+  public function testConstructorHeaderWrongType() {
+    $this->setExpectedException('\InvalidArgumentException',
+        'header must be a string. Actual type: double');
+    $t = new PushTask('/some-url', ['user-key' => 'user-data'],
+        ['header' => 50.0]);
+  }
+
+  public function testConstructorHeaderWithoutColon() {
+    $this->setExpectedException('\InvalidArgumentException',
+        'Each header must contain a colon. Header: bad-header!');
+    $t = new PushTask('/some-url', ['user-key' => 'user-data'],
+        ['header' => 'bad-header!']);
+  }
+
+  public function testConstructorInvalidContentType() {
+    $this->setExpectedException('\InvalidArgumentException',
+        'Content-type header may not be specified as it is set by the task.');
+    $t = new PushTask('/some-url', ['user-key' => 'user-data'],
+        ['header' => 'content-type: application/pdf']);
+  }
+
   public function testAddInvalidQueue() {
     $this->setExpectedException('\InvalidArgumentException');
     (new PushTask('/someUrl'))->add(999);
@@ -192,15 +222,6 @@
     (new PushTask('/someUrl', ['field' => str_repeat('a', 102395)]))->add();
   }
 
-  public function testAddUrlTooBig() {
-    $url = '/' . str_repeat('b', 2083);
-    $this->setExpectedException(
-        '\google\appengine\api\taskqueue\TaskQueueException',
-        'URL length greater than maximum of ' . PushTask::MAX_URL_LENGTH) .
-        '. URL: ' . $url;
-    (new PushTask($url))->add();
-  }
-
   public function testPushTaskSimplestAdd() {
     $req = self::buildBulkAddRequest();
 
@@ -287,6 +308,73 @@
     $this->apiProxyMock->verify();
   }
 
+  public function testPushTaskAddWithHeader() {
+    $req = self::buildBulkAddRequest();
+    $add_req = $req->getAddRequest(0);
+    $header = $add_req->addHeader();
+    $header->setKey('custom-header');
+    $header->setValue('54321');
+
+    $resp = new TaskQueueBulkAddResponse();
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::OK);
+    $task_result->setChosenTaskName('fred');
+
+    $this->apiProxyMock->expectCall('taskqueue', 'BulkAdd', $req, $resp);
+
+    $task_name = (new PushTask('/someUrl', [],
+        ['header' => 'custom-header: 54321']))->add();
+    $this->assertEquals('fred', $task_name);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testPushTaskAddWithHeaderAndQueryData() {
+    $query_data = ['key' => 'some value'];
+    $req = self::buildBulkAddRequest();
+    $add_req = $req->getAddRequest(0);
+    $add_req->setBody(http_build_query($query_data));
+
+    $header = $add_req->addHeader();
+    $header->setKey('content-type');
+    $header->setValue('application/x-www-form-urlencoded');
+    $header = $add_req->addHeader();
+    $header->setKey('custom-header');
+    $header->setValue('xyz');
+
+    $resp = new TaskQueueBulkAddResponse();
+
+    $this->apiProxyMock->expectCall('taskqueue', 'BulkAdd', $req, $resp);
+
+    (new PushTask('/someUrl', $query_data,
+        ['header' => 'custom-header: xyz']))->add();
+    $this->apiProxyMock->verify();
+  }
+
+  public function testPushTaskAddWithTwoHeaders() {
+    $req = self::buildBulkAddRequest();
+    $add_req = $req->getAddRequest(0);
+    $header = $add_req->addHeader();
+    $header->setKey('custom-header');
+    $header->setValue('54321');
+    $header = $add_req->addHeader();
+    $header->setKey('another-custom-header');
+    $header->setValue('abc');
+
+    $resp = new TaskQueueBulkAddResponse();
+    $task_result = $resp->addTaskResult();
+    $task_result->setResult(ErrorCode::OK);
+    $task_result->setChosenTaskName('fred');
+
+    $this->apiProxyMock->expectCall('taskqueue', 'BulkAdd', $req, $resp);
+
+    $task_name = (new PushTask('/someUrl', [],
+        ['header' => "custom-header: 54321\r\n" .
+                     "another-custom-header: abc"]))->add();
+    $this->assertEquals('fred', $task_name);
+    $this->apiProxyMock->verify();
+  }
+
   public function testUnknownQueueError() {
     $req = self::buildBulkAddRequest();
     $exception = new \google\appengine\runtime\ApplicationError(
diff --git a/php/sdk/google/appengine/base/memcache_sharding_strategy_pb.php b/php/sdk/google/appengine/base/memcache_sharding_strategy_pb.php
deleted file mode 100644
index 3133809..0000000
--- a/php/sdk/google/appengine/base/memcache_sharding_strategy_pb.php
+++ /dev/null
@@ -1,66 +0,0 @@
-<?php
-/**
- * Copyright 2007 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/appengine/base/memcache_sharding_strategy.proto
-
-namespace dummy {
-  require_once 'google/appengine/runtime/proto/ProtocolMessage.php';
-}
-namespace google\appengine\MemcacheShardingStrategy {
-  class Enum {
-    const NAMESPACE_OR_KEY = 0;
-    const NAMESPACE_AND_KEY = 1;
-  }
-}
-namespace google\appengine {
-  class MemcacheShardingStrategy extends \google\net\ProtocolMessage {
-    public function clear() {
-    }
-    public function byteSizePartial() {
-      $res = 0;
-      return $res;
-    }
-    public function outputPartial($out) {
-    }
-    public function tryMerge($d) {
-      while($d->avail() > 0) {
-        $tt = $d->getVarInt32();
-        switch ($tt) {
-          case 0:
-            throw new \google\net\ProtocolBufferDecodeError();
-            break;
-          default:
-            $d->skipData($tt);
-        }
-      };
-    }
-    public function checkInitialized() {
-      return null;
-    }
-    public function mergeFrom($x) {
-      if ($x === $this) { throw new \IllegalArgumentException('Cannot copy message to itself'); }
-    }
-    public function equals($x) {
-      if ($x === $this) { return true; }
-      return true;
-    }
-    public function shortDebugString($prefix = "") {
-      $res = '';
-      return $res;
-    }
-  }
-}
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageClient.php
index e635add..1353fb7 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageClient.php
@@ -43,6 +43,14 @@
  * perform.
  */
 abstract class CloudStorageClient {
+  // The default chunk size that we will read from the file. This value should
+  // remain smaller than the maximum object size valid for memcache writes so
+  // we can cache the reads.
+  const DEFAULT_READ_SIZE = 524288;
+
+  // The default amount of time that reads will be held in the cache.
+  const DEFAULT_READ_CACHE_EXPIRY_SECONDS = 3600;  // one hour
+
   // Token scopes for accessing objects in Google Cloud Storage
   const READ_SCOPE = "https://www.googleapis.com/auth/devstorage.read_only";
   const WRITE_SCOPE = "https://www.googleapis.com/auth/devstorage.read_write";
@@ -63,14 +71,42 @@
   // Format for the OAuth token header.
   const OAUTH_TOKEN_FORMAT = "OAuth %s";
 
+  // Content Range Header format when the total length is unknown.
+  const PARTIAL_CONTENT_RANGE_FORMAT = "bytes %d-%d/*";
+
+  // Content Range Header format when the length is known.
+  const FINAL_CONTENT_RANGE_FORMAT = "bytes %d-%d/%d";
+
+  // Content Range Header for the final chunk with no new data.
+  const FINAL_CONTENT_RANGE_NO_DATA = "bytes */%d";
+
+  // A character or multiple characters that can be used to simplify a list of
+  // objects that use a directory-like naming scheme. Can be used in conjunction
+  // with a prefix.
+  const DELIMITER = '/';
+
+  // Cloud Storage can append _$folder$ to an object name and have it behave
+  // like a regular file system folder.
+  const FOLDER_SUFFIX = '_$folder$';
+
   // Bit fields for the stat mode field
   const S_IFREG = 0100000;
   const S_IFDIR = 0040000;
 
-  const S_IRWXU = 00700;  //  mask for owner permissions
-  const S_IRUSR = 00400;  //  owner: read permission
-  const S_IWUSR = 00200;  //  owner: write permission
-  const S_IXUSR = 00100;  //  owner: execute permission
+  const S_IRWXU = 00700;  // mask for owner permissions
+  const S_IRUSR = 00400;  // read for owner
+  const S_IWUSR = 00200;  // write for owner
+  const S_IXUSR = 00100;  // execute for owner
+
+  const S_IRWXG = 00070;  // mask for group permissions
+  const S_IRGRP = 00040;  // read for group
+  const S_IWGRP = 00020;  // write for group
+  const S_IXGRP = 00010;  // execute for group
+
+  const S_IRWXO = 00007;  // mask for other permissions
+  const S_IROTH = 00004;  // read for other
+  const S_IWOTH = 00002;  // write for other
+  const S_IXOTH = 00001;  // execute for other
 
   // The API version header
   private static $api_version_header = ["x-goog-api-version" => 2];
@@ -78,6 +114,15 @@
   // Regex pattern for retrieving the length of the content being served.
   const CONTENT_RANGE_REGEX = "/bytes\s+(\d+)-(\d+)\/(\d+)/i";
 
+  /**
+   * Memcache key format for caching the results of reads from GCS. The
+   * parameters are the object URL (as a string) and the read range, as a
+   * string (e.g. bytes=0-524287).
+   * Example key for a cloud storage file gs://bucket/object.png:
+   *   _ah_gs_read_cache_https://storage.googleapis.com/bucket/object.png_bytes=0-524287
+   */
+  const MEMCACHE_KEY_FORMAT = "_ah_gs_read_cache_%s_%s";
+
   // HTTP status codes that should be retried if they are returned by a request
   // to GCS. Retry should occur with a random exponential back-off.
   protected static $retry_error_codes = [HttpResponse::REQUEST_TIMEOUT,
@@ -104,6 +149,12 @@
       "PATCH" => RequestMethod::PATCH
   ];
 
+  private static $default_gs_context_options = [
+      "enable_cache" => true,
+      "enable_optimistic_cache" => false,
+      "cache_expiry_seconds" => self::DEFAULT_READ_CACHE_EXPIRY_SECONDS,
+  ];
+
   protected $bucket_name;  // Name of the bucket for this object.
   protected $object_name;  // The name of the object.
   protected $context_options = [];  // Any context arguments supplied on open.
@@ -113,7 +164,10 @@
   /**
    * Construct an object of CloudStorageClient.
    *
-   * @
+   * @param string $bucket The name of the bucket.
+   * @param string $object The name of the object, or null if there is no
+   * object.
+   * @param resource $context The stream context to use.
    */
   public function __construct($bucket, $object = null, $context = null) {
     $this->bucket_name = $bucket;
@@ -123,7 +177,10 @@
     }
     $context_array = stream_context_get_options($context);
     if (array_key_exists("gs", $context_array)) {
-      $this->context_options = $context_array["gs"];
+      $this->context_options = array_merge(self::$default_gs_context_options,
+                                           $context_array["gs"]);
+    } else {
+      $this->context_options = self::$default_gs_context_options;
     }
     $this->anonymous = util\FindByKeyOrNull($this->context_options,
                                             "anonymous");
@@ -206,9 +263,11 @@
 
   /**
    * Create a URL for a target bucket and optional object.
+   *
+   * @visibleForTesting
    */
-  protected function createObjectUrl($bucket, $object) {
-    $host = $this->isDevelServer() ? self::LOCAL_HOST : self::PRODUCTION_HOST;
+  public static function createObjectUrl($bucket, $object = null) {
+    $host = self::isDevelServer() ? self::LOCAL_HOST : self::PRODUCTION_HOST;
     if (isset($object)) {
       return sprintf(self::BUCKET_OBJECT_FORMAT, $host, $bucket, $object);
     } else {
@@ -403,7 +462,7 @@
    *
    * @return bool True if running in the development server, false otherwise.
    */
-  private function isDevelServer() {
+  private static function isDevelServer() {
     $server_software = getenv("SERVER_SOFTWARE");
     $key = "Development";
     return strncmp($server_software, $key, strlen($key)) === 0;
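
The new caching behaviour is controlled through the 'gs' stream context. A
minimal sketch of overriding the defaults defined in
$default_gs_context_options above (bucket and object names are hypothetical):

    <?php
    $ctx = stream_context_create(['gs' => [
        'enable_cache' => true,             // default: true
        'enable_optimistic_cache' => true,  // default: false
        'cache_expiry_seconds' => 600,      // default: 3600 (one hour)
    ]]);
    $data = file_get_contents('gs://my_bucket/my_object.txt', false, $ctx);
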
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageDirectoryClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageDirectoryClient.php
index d997228..c5e7be7 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageDirectoryClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageDirectoryClient.php
@@ -23,17 +23,14 @@
 namespace google\appengine\ext\cloud_storage_streams;
 
 require_once 'google/appengine/ext/cloud_storage_streams/CloudStorageClient.php';
+require_once 'google/appengine/util/string_util.php';
+
+use \google\appengine\util as util;
 
 /**
  * Client for deleting objects from Google Cloud Storage.
  */
 final class CloudStorageDirectoryClient extends CloudStorageClient {
-
-  // A character or multiple characters that can be used to simplify a list of
-  // objects that use a directory-like naming scheme. Can be used in conjunction
-  // with a prefix.
-  const DELIMITER = '/';
-
   // Maximum number of keys to return per call
   const MAX_KEYS = 1000;
 
@@ -50,9 +47,12 @@
   private $current_file_list = null;
 
   public function __construct($bucket_name, $object_prefix, $context) {
-    parent::__construct($bucket_name, null, $context);
+    parent::__construct($bucket_name, $object_prefix, $context);
     // Ignore the leading slash
     if (isset($object_prefix)) {
+      if (!util\endsWith($object_prefix, '/')) {
+        $object_prefix .= '/';
+      }
       $this->prefix = substr($object_prefix, 1);
     }
   }
@@ -60,8 +60,8 @@
   /**
    * Make the initial connection to GCS and fill the read buffer with files.
    *
-   * @return bool TRUE if we can connect to the Cloud Storage bucket, FALSE
-   * otherwise.
+   * @return bool <code>true</code> if we can connect to the Cloud Storage
+   * bucket, <code>false</code> otherwise.
    */
   public function initialise() {
     return $this->fillFileBuffer();
@@ -71,7 +71,7 @@
    * Read the next file in the directory list. If the list is empty and we
    * believe that there are more results to read then fetch them
    *
-   * @return string The name of the next file in the directory, FALSE if there
+   * @return string The name of the next file in the directory, false if there
    * are no more files.
    */
   public function dir_readdir() {
@@ -100,7 +100,7 @@
    * Rewind the directory handle to the first file that would have been returned
    * from opendir().
    *
-   * @return bool True is successful, False otherwise.
+   * @return bool <code>true</code> if successful, <code>false</code> otherwise.
    */
   public function dir_rewinddir() {
     // We could be more efficient if the user calls opendir() followed by
@@ -113,6 +113,106 @@
   public function close() {
   }
 
+  /**
+   * Make a 'directory' in Google Cloud Storage.
+   *
+   * @param mixed $options A bitwise mask of values, such as
+   * STREAM_MKDIR_RECURSIVE.
+   *
+   * @return bool <code>true</code> if the directory was created,
+   * <code>false</code> otherwise.
+   *
+   * TODO: If the STREAM_MKDIR_RECURSIVE bit is not set in the options then we
+   * should validate that the entire path exists before we create the directory.
+   */
+  public function mkdir($options) {
+    $report_errors = ($options & STREAM_REPORT_ERRORS) != 0;
+    $path = $this->getCorrectPathForDirectoryName();
+    $headers = $this->getOAuthTokenHeader(parent::WRITE_SCOPE);
+    if ($headers === false) {
+      if ($report_errors) {
+        trigger_error("Unable to acquire OAuth token.", E_USER_WARNING);
+      }
+      return false;
+    }
+
+    // Use x-goog-if-generation-match so we only create a new object.
+    $headers['x-goog-if-generation-match'] = 0;
+    $headers['Content-Range'] = sprintf(parent::FINAL_CONTENT_RANGE_NO_DATA, 0);
+    $url = $this->createObjectUrl($this->bucket_name, $path);
+    $http_response = $this->makeHttpRequest($url, "PUT", $headers);
+
+    if (false === $http_response) {
+      if ($report_errors) {
+        trigger_error("Unable to connect to Google Cloud Storage Service.",
+                      E_USER_WARNING);
+      }
+      return false;
+    }
+
+    // A precondition-failed status code means that the 'directory' already
+    // existed.
+    $status_code = $http_response['status_code'];
+    if ($status_code != HttpResponse::OK &&
+        $status_code != HttpResponse::PRECONDITION_FAILED) {
+      if ($report_errors) {
+        trigger_error($this->getErrorMessage($status_code,
+                                             $http_response['body']),
+                    E_USER_WARNING);
+      }
+      return false;
+    }
+    return ($status_code === HttpResponse::OK);
+  }
+
+  /**
+   * Attempts to remove the directory. The directory must be empty. An
+   * E_WARNING level error will be generated on failure.
+   *
+   * @param mixed $options A bitwise mask of values, such as
+   * STREAM_MKDIR_RECURSIVE.
+   *
+   * @return bool <code>true</code> if the directory was removed,
+   * <code>false</code> otherwise.
+   */
+  public function rmdir($options) {
+    // We need to check that the 'directory' is empty before we can unlink it.
+    // As we create a new instance of CloudStorageDirectoryClient when
+    // performing a rmdir(), the first readdir() returning any value is enough
+    // to tell us that the directory is not empty.
+    if ($this->dir_readdir() !== false) {
+      trigger_error('The directory is not empty.', E_USER_WARNING);
+      return false;
+    }
+
+    $report_errors = ($options & STREAM_REPORT_ERRORS) != 0;
+    $headers = $this->getOAuthTokenHeader(parent::WRITE_SCOPE);
+    if ($headers === false) {
+      if ($report_errors) {
+        trigger_error("Unable to acquire OAuth token.", E_USER_WARNING);
+      }
+      return false;
+    }
+
+    $path = $this->getCorrectPathForDirectoryName();
+    $url = $this->createObjectUrl($this->bucket_name, $path);
+    $http_response = $this->makeHttpRequest($url, "DELETE", $headers);
+
+    if (false === $http_response) {
+      trigger_error("Unable to connect to Google Cloud Storage Service.",
+                    E_USER_WARNING);
+      return false;
+    }
+
+    if (HttpResponse::NO_CONTENT == $http_response['status_code']) {
+      return true;
+    } else {
+      trigger_error($this->getErrorMessage($http_response['status_code'],
+                                           $http_response['body']),
+                    E_USER_WARNING);
+      return false;
+    }
+  }
+
   private function fillFileBuffer() {
     $headers = $this->getOAuthTokenHeader(parent::READ_SCOPE);
     if ($headers === false) {
@@ -121,7 +221,7 @@
     }
 
     $query_arr = [
-        'delimiter' => self::DELIMITER,
+        'delimiter' => parent::DELIMITER,
         'max-keys' => self::MAX_KEYS,
     ];
     if (isset($this->prefix)) {
@@ -131,9 +231,8 @@
       $query_arr['marker'] = $this->next_marker;
     }
     $query_str = http_build_query($query_arr);
-    $http_response = $this->makeHttpRequest(sprintf("%s?%s",
-                                                    $this->url,
-                                                    $query_str),
+    $url = $this->createObjectUrl($this->bucket_name);
+    $http_response = $this->makeHttpRequest(sprintf("%s?%s", $url, $query_str),
                                             "GET",
                                             $headers);
 
@@ -163,10 +262,35 @@
       $this->current_file_list = [];
     }
 
+    $prefix_len = isset($this->prefix) ? strlen($this->prefix) : 0;
     foreach($xml->Contents as $content) {
-      array_push($this->current_file_list, (string) $content->Key);
+      $key = (string) $content->Key;
+      if ($prefix_len != 0) {
+        $key = substr($key, $prefix_len);
+      }
+      // If the key ends with FOLDER_SUFFIX then replace that value with a '/'
+      // to be consistent with the folder behaviour of Google Cloud Storage
+      // Manager, which supports creating 'folders' in the UI. See
+      // https://developers.google.com/storage/docs/gsmanager
+      if (util\endsWith($key, self::FOLDER_SUFFIX)) {
+        $key = substr_replace($key,
+                              parent::DELIMITER,
+                              -strlen(parent::FOLDER_SUFFIX));
+      }
+      array_push($this->current_file_list, $key);
     }
-
     return true;
   }
+
+  private function getCorrectPathForDirectoryName() {
+    // Strip the trailing DELIMITER from the object name and replace it with
+    // the FOLDER_SUFFIX.
+    if (util\endsWith($this->object_name, parent::DELIMITER)) {
+      return substr_replace($this->object_name,
+                            parent::FOLDER_SUFFIX,
+                            -strlen(parent::DELIMITER));
+    } else {
+      return $this->object_name . parent::FOLDER_SUFFIX;
+    }
+  }
 }
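
The directory client above backs the standard PHP directory and filesystem
functions through the gs:// stream wrapper. A usage sketch (bucket and paths
are hypothetical):

    <?php
    mkdir('gs://my_bucket/a/b');           // creates the object 'a/b_$folder$'
    $handle = opendir('gs://my_bucket/a/b');
    while (($file = readdir($handle)) !== false) {
      echo $file, "\n";                    // keys are returned without the 'a/b/' prefix
    }
    closedir($handle);
    rmdir('gs://my_bucket/a/b');           // E_USER_WARNING if the directory is not empty
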
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php
index e23f190..2861447 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php
@@ -30,9 +30,6 @@
  * Google Cloud Storage Client for reading objects.
  */
 final class CloudStorageReadClient extends CloudStorageClient {
-  // The default chunk size that we will read from the file.
-  const DEFAULT_READ_SIZE = 524288;
-
   // Buffer for storing data.
   private $read_buffer;
 
@@ -64,8 +61,12 @@
                                         HttpResponse::PARTIAL_CONTENT,
                                         HttpResponse::RANGE_NOT_SATISFIABLE];
 
+  // Client for caching the results of GCS reads.
+  private $memcache_client;
+
   public function __construct($bucket, $object, $context) {
     parent::__construct($bucket, $object, $context);
+    $this->memcache_client = new \Memcache();
   }
 
   public function __destruct() {
@@ -182,6 +183,63 @@
   }
 
   /**
+   * Override the makeHttpRequest function so we can implement caching.
+   * If caching is enabled then we try to retrieve a matching request for the
+   * object name and range from memcache.
+   * If we find a result in memcache and optimistic caching is enabled, then
+   * we return that result immediately without checking if the object has
+   * changed in GCS. Otherwise, we issue an 'If-None-Match' request with the
+   * ETag of the object to ensure it is still current.
+   *
+   * Optimistic caching is best suited to applications that are solely
+   * updating objects in cloud storage, as the cache can be invalidated when
+   * the object is updated by the application.
+   */
+  protected function makeHttpRequest($url, $method, $headers, $body = null) {
+    if (!$this->context_options['enable_cache']) {
+      return parent::makeHttpRequest($url, $method, $headers, $body);
+    }
+
+    $cache_key = sprintf(parent::MEMCACHE_KEY_FORMAT, $url, $headers['Range']);
+    $cache_obj = $this->memcache_client->get($cache_key);
+    if (false !== $cache_obj) {
+      if ($this->context_options['enable_optimistic_cache']) {
+        return $cache_obj;
+      } else {
+        $cache_etag = $this->getHeaderValue('ETag', $cache_obj['headers']);
+        if (array_key_exists('If-Match', $headers)) {
+          // We will perform a If-None-Match to validate the cache object, only
+          // if it has the same ETag value as what we are asking for.
+          if ($headers['If-Match'] === $cache_etag) {
+            unset($headers['If-Match']);
+          } else {
+            // We are asking for a different object than what is in the cache.
+            $cache_etag = null;
+          }
+        }
+      }
+      if (isset($cache_etag)) {
+        $headers['If-None-Match'] = $cache_etag;
+      }
+    }
+
+    $result = parent::makeHttpRequest($url, $method, $headers, $body);
+
+    if (false === $result) {
+      return false;
+    }
+    $status_code = $result['status_code'];
+    if (HttpResponse::NOT_MODIFIED === $result['status_code']) {
+      return $cache_obj;
+    }
+    if (in_array($status_code, self::$valid_status_codes)) {
+      $this->memcache_client->set($cache_key, $result, 0,
+          $this->context_options['cache_expiry_seconds']);
+    }
+    return $result;
+  }
+
+  /**
    * Fill our internal buffer with data by making an HTTP request to Google
    * Cloud Storage.
    */
@@ -192,7 +250,7 @@
       return false;
     }
 
-    $end_range = $read_position + self::DEFAULT_READ_SIZE - 1;
+    $end_range = $read_position + parent::DEFAULT_READ_SIZE - 1;
     $range = $this->getRangeHeader($read_position, $end_range);
     $headers = array_merge($headers, $range);
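
To make the read cache concrete, this is roughly the memcache key the client
computes for the first chunk of an object, built from the constants defined
in CloudStorageClient (the URL is a hypothetical production object URL):

    <?php
    $url = 'https://storage.googleapis.com/bucket/object.png';
    $range = sprintf('bytes=%d-%d', 0, CloudStorageClient::DEFAULT_READ_SIZE - 1);
    $key = sprintf(CloudStorageClient::MEMCACHE_KEY_FORMAT, $url, $range);
    // $key === '_ah_gs_read_cache_https://storage.googleapis.com/bucket/object.png_bytes=0-524287'
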
 
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapper.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapper.php
index d098222..f81f02d 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapper.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapper.php
@@ -48,7 +48,6 @@
   // constructing objects.
   public $context;
 
-  const ALLOWED_BUCKET_INI = "google_app_engine.allow_include_gs_buckets";
   const STREAM_OPEN_FOR_INCLUDE = 0x80;
   /**
    * Constructs a new stream wrapper.
@@ -63,6 +62,82 @@
   }
 
   /**
+   * Close an open directory handle.
+   */
+  public function dir_closedir() {
+    assert(isset($this->client));
+    $this->client->close();
+    $this->client = null;
+  }
+
+  /**
+   * Open a directory handle.
+   */
+  public function dir_opendir($path, $options) {
+    if (!$this->getBucketAndObjectFromPath($path, $bucket, $path)) {
+      trigger_error(sprintf("Invalid Google Cloud Storage path: %s", $path),
+                    E_USER_ERROR);
+      return false;
+    }
+
+    $this->client = new CloudStorageDirectoryClient($bucket,
+                                                    $path,
+                                                    $this->context);
+    return $this->client->initialise();
+  }
+
+  /**
+   * Read entry from the directory handle.
+   *
+   * @return string The next filename, or false if there is no next file.
+   */
+  public function dir_readdir() {
+    assert(isset($this->client));
+    return $this->client->dir_readdir();
+  }
+
+  /**
+   * Reset the output returned from dir_readdir.
+   *
+   * @return bool true if the stream can be rewound, false otherwise.
+   */
+  public function dir_rewinddir() {
+    assert(isset($this->client));
+    return $this->client->dir_rewinddir();
+  }
+
+  public function mkdir($path, $mode, $options) {
+    if (!$this->getBucketAndObjectFromPath($path, $bucket, $object) ||
+        !isset($object)) {
+      if (($options & STREAM_REPORT_ERRORS) != 0) {
+        trigger_error(sprintf("Invalid Google Cloud Storage path: %s", $path),
+                      E_USER_ERROR);
+      }
+      return false;
+    }
+    $client = new CloudStorageDirectoryClient($bucket,
+                                              $object,
+                                              $this->context);
+    return $client->mkdir($options);
+  }
+
+  public function rmdir($path, $options) {
+    if (!$this->getBucketAndObjectFromPath($path, $bucket, $object) ||
+        !isset($object)) {
+      if (($options & STREAM_REPORT_ERRORS) != 0) {
+        trigger_error(sprintf("Invalid Google Cloud Storage path: %s", $path),
+                      E_USER_ERROR);
+      }
+      return false;
+    }
+    $client = new CloudStorageDirectoryClient($bucket,
+                                              $object,
+                                              $this->context);
+    return $client->rmdir($options);
+  }
+
+  /**
    * Rename a cloud storage object.
    *
    * @return TRUE if the object was renamed, FALSE otherwise
@@ -137,7 +212,7 @@
     }
 
     if (($options & self::STREAM_OPEN_FOR_INCLUDE) != 0) {
-      $allowed_buckets = explode(",", ini_get(self::ALLOWED_BUCKET_INI));
+      $allowed_buckets = explode(",", GAE_INCLUDE_GS_BUCKETS);
       $include_allowed = false;
       foreach ($allowed_buckets as $bucket_name) {
         $bucket_name = trim($bucket_name);
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php
index 793d0eb..e44f0cb 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php
@@ -20,14 +20,36 @@
  */
 
 namespace {
-// Ignore calls to memcache from app identity service.
+// Mock Memcache class
 class Memcache {
+  // Mock object to validate calls to memcache
+  static $mock_memcache = null;
+
+  public static function setMockMemcache($mock) {
+    self::$mock_memcache = $mock;
+  }
   public function get($keys, $flags = null) {
-    return False;
+    return self::$mock_memcache->get($keys, $flags);
   }
   public function set($key, $value, $flag = null, $expire = 0) {
+    return self::$mock_memcache->set($key, $value, $flag, $expire);
   }
 }
+
+// Mock memcached class, used when invalidating cache entries on write.
+class Memcached {
+  // Mock object to validate calls to memcached
+  static $mock_memcached = null;
+
+  public static function setMockMemcached($mock) {
+    self::$mock_memcached = $mock;
+  }
+
+  public function deleteMulti($keys, $time = 0) {
+    self::$mock_memcached->deleteMulti($keys, $time);
+  }
+}
+
 }  // namespace
 
 namespace google\appengine\ext\cloud_storage_streams {
@@ -48,15 +70,6 @@
 use \google\appengine\ext\cloud_storage_streams\HttpResponse;
 use \google\appengine\URLFetchRequest\RequestMethod;
 
-// Allow mocking of ini_get by declaring it in the namespace.
-function ini_get($name) {
-  if ($name === "google_app_engine.allow_include_gs_buckets") {
-    return CloudStorageStreamWrapperTest::$allowed_gs_bucket;
-  } else {
-    return \ini_get($name);
-  }
-}
-
 class CloudStorageStreamWrapperTest extends ApiProxyTestBase {
 
   public static $allowed_gs_bucket = "";
@@ -71,7 +84,18 @@
 
     CloudStorageStreamWrapperTest::$allowed_gs_bucket = "";
 
+    // By default, disable caching so we don't have to mock out memcache in
+    // every test.
+    stream_context_set_default(['gs' => ['enable_cache' => false]]);
+
     date_default_timezone_set("UTC");
+
+    $this->mock_memcache = $this->getMock('\Memcache');
+    $this->mock_memcache_call_index = 0;
+    \Memcache::setMockMemcache($this->mock_memcache);
+
+    $this->mock_memcached = $this->getMock('\Memcached');
+    \Memcached::setMockMemcached($this->mock_memcached);
   }
 
   protected function tearDown() {
@@ -121,6 +145,137 @@
     $this->apiProxyMock->verify();
   }
 
+  public function testReadObjectCacheHitSuccess() {
+    $body = "Hello from PHP";
+
+    // First call is to create the OAuth token.
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+
+    // Second call is to retrieve the cached read.
+    $response = [
+        'status_code' => 200,
+        'headers' => [
+            'Content-Length' => strlen($body),
+            'ETag' => 'deadbeef',
+            'Content-Type' => 'text/plain',
+            'Last-Modified' => 'Mon, 02 Jul 2012 01:41:01 GMT',
+        ],
+        'body' => $body,
+    ];
+    $this->mock_memcache->expects($this->at($this->mock_memcache_call_index++))
+                        ->method('get')
+                        ->with($this->stringStartsWith('_ah_gs_read_cache'))
+                        ->will($this->returnValue($response));
+
+    // We now expect a read request with If-None-Match set to our ETag.
+    $request_headers = [
+        'Authorization' => 'OAuth foo token',
+        'Range' => sprintf('bytes=%d-%d',
+                           0,
+                           CloudStorageReadClient::DEFAULT_READ_SIZE - 1),
+        'If-None-Match' => 'deadbeef',
+        'x-goog-api-version' => 2,
+    ];
+    $response = [
+        'status_code' => HttpResponse::NOT_MODIFIED,
+        'headers' => [
+        ],
+    ];
+
+    $expected_url = $this->makeCloudStorageObjectUrl();
+    $this->expectHttpRequest($expected_url,
+                             RequestMethod::GET,
+                             $request_headers,
+                             null,
+                             $response);
+
+    $options = [ 'gs' => [
+            'enable_cache' => true,
+            'enable_optimistic_cache' => false,
+        ]
+    ];
+    $ctx = stream_context_create($options);
+    $valid_path = "gs://bucket/object.png";
+    $data = file_get_contents($valid_path, false, $ctx);
+
+    $this->assertEquals($body, $data);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testReadObjectCacheWriteSuccess() {
+    $body = "Hello from PHP";
+
+    $this->expectFileReadRequest($body,
+                                 0,
+                                 CloudStorageReadClient::DEFAULT_READ_SIZE,
+                                 null);
+
+    // Don't read the page from the cache
+    $this->mock_memcache->expects($this->at($this->mock_memcache_call_index++))
+                        ->method('get')
+                        ->with($this->stringStartsWith('_ah_gs_read_cache'))
+                        ->will($this->returnValue(false));
+
+    // Expect a write back to the cache
+    $cache_expiry_seconds = 60;
+    $this->mock_memcache->expects($this->at($this->mock_memcache_call_index++))
+                        ->method('set')
+                        ->with($this->stringStartsWith('_ah_gs_read_cache'),
+                               $this->anything(),
+                               null,
+                               $cache_expiry_seconds)
+                        ->will($this->returnValue(false));
+
+    $options = [ 'gs' => [
+            'enable_cache' => true,
+            'enable_optimistic_cache' => false,
+            'cache_expiry_seconds' => $cache_expiry_seconds,
+        ]
+    ];
+    $ctx = stream_context_create($options);
+    $valid_path = "gs://bucket/object_name.png";
+    $data = file_get_contents($valid_path, false, $ctx);
+
+    $this->assertEquals($body, $data);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testReadObjectOptimisticCacheHitSuccess() {
+    $body = "Hello from PHP";
+
+    // First call is to create the OAuth token.
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+
+    // Second call is to retrieve the cached read.
+    $response = [
+        'status_code' => 200,
+        'headers' => [
+            'Content-Length' => strlen($body),
+            'ETag' => 'deadbeef',
+            'Content-Type' => 'text/plain',
+            'Last-Modified' => 'Mon, 02 Jul 2012 01:41:01 GMT',
+        ],
+        'body' => $body,
+    ];
+    $this->mock_memcache->expects($this->at($this->mock_memcache_call_index++))
+                        ->method('get')
+                        ->with($this->stringStartsWith('_ah_gs_read_cache'))
+                        ->will($this->returnValue($response));
+
+    $options = [ 'gs' => [
+            'enable_cache' => true,
+            'enable_optimistic_cache' => true,
+        ]
+    ];
+    $ctx = stream_context_create($options);
+    $valid_path = "gs://bucket/object_name.png";
+    $data = file_get_contents($valid_path, false, $ctx);
+
+    $this->assertEquals($body, $data);
+    $this->apiProxyMock->verify();
+  }
+
   public function testReadObjectPartialContentResponseSuccess() {
     // GCS returns a 206 even if you can obtain all of the file in the first
     // read - this test simulates that behavior.
@@ -215,10 +370,7 @@
   public function testDeleteObjectSuccess() {
     $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
 
-    $request_headers = [
-        "Authorization" => "OAuth foo token",
-        "x-goog-api-version" => 2,
-    ];
+    $request_headers = $this->getStandardRequestHeaders();
     $response = [
         'status_code' => 204,
         'headers' => [
@@ -238,10 +390,7 @@
   public function testDeleteObjectFail() {
     $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
 
-    $request_headers = [
-        "Authorization" => "OAuth foo token",
-        "x-goog-api-version" => 2,
-    ];
+    $request_headers = $this->getStandardRequestHeaders();
     $response = [
         'status_code' => 404,
         'headers' => [
@@ -266,21 +415,23 @@
   }
 
   public function testStatBucketSuccess() {
-    $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
-
-    $request_headers = [
-        "Authorization" => "OAuth foo token",
-        "x-goog-api-version" => 2,
-    ];
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+    $request_headers = $this->getStandardRequestHeaders();
+    $file_results = ['file1.txt', 'file2.txt'];
     $response = [
         'status_code' => 200,
         'headers' => [
         ],
+        'body' => $this->makeGetBucketXmlResponse("", $file_results),
     ];
-
     $expected_url = $this->makeCloudStorageObjectUrl("bucket", null);
-    $this->expectHttpRequest($expected_url,
-                             RequestMethod::HEAD,
+    $expected_query = http_build_query([
+        "delimiter" => CloudStorageClient::DELIMITER,
+        "max-keys" => CloudStorageUrlStatClient::MAX_KEYS,
+    ]);
+
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
                              $request_headers,
                              null,
                              $response);
@@ -290,48 +441,104 @@
   }
 
   public function testStatObjectSuccess() {
-    $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
-
-    $request_headers = [
-        "Authorization" => "OAuth foo token",
-        "x-goog-api-version" => 2,
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+    // Return the object we want in the second request so we test fetching
+    // from the marker to get all of the results.
+    $last_modified = 'Mon, 01 Jul 2013 10:02:46 GMT';
+    $request_headers = $this->getStandardRequestHeaders();
+    $file_results = [
+        ['key' => 'object1.png', 'size' => '3337', 'mtime' => $last_modified],
     ];
     $response = [
         'status_code' => 200,
         'headers' => [
-            'Content-Length' => 37337,
         ],
+        'body' => $this->makeGetBucketXmlResponse("", $file_results, "foo"),
     ];
+    $expected_url = $this->makeCloudStorageObjectUrl("bucket", null);
+    $expected_query = http_build_query([
+        'delimiter' => CloudStorageClient::DELIMITER,
+        'max-keys' => CloudStorageUrlStatClient::MAX_KEYS,
+        'prefix' => 'object.png',
+    ]);
 
-    $expected_url = $this->makeCloudStorageObjectUrl();
-    $this->expectHttpRequest($expected_url,
-                             RequestMethod::HEAD,
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
+                             $request_headers,
+                             null,
+                             $response);
+
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+    $file_results = [
+        ['key' => 'object.png', 'size' => '37337', 'mtime' => $last_modified],
+    ];
+    $response['body'] = $this->makeGetBucketXmlResponse("", $file_results);
+    $expected_query = http_build_query([
+        'delimiter' => CloudStorageClient::DELIMITER,
+        'max-keys' => CloudStorageUrlStatClient::MAX_KEYS,
+        'prefix' => 'object.png',
+        'marker' => 'foo',
+    ]);
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
                              $request_headers,
                              null,
                              $response);
 
     $result = stat("gs://bucket/object.png");
     $this->assertEquals(37337, $result['size']);
-    $this->assertEquals(0100400, $result['mode']);
+    $this->assertEquals(0100444, $result['mode']);
+    $this->assertEquals(strtotime($last_modified), $result['mtime']);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testStatObjectAsFolderSuccess() {
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+    $request_headers = $this->getStandardRequestHeaders();
+    $last_modified = 'Mon, 01 Jul 2013 10:02:46 GMT';
+    $file_results = [
+        ['key' => 'a/b_$folder$', 'size' => '0', 'mtime' => $last_modified],
+    ];
+    $response = [
+        'status_code' => 200,
+        'headers' => [
+        ],
+        'body' => $this->makeGetBucketXmlResponse('a/b', $file_results),
+    ];
+    $expected_url = $this->makeCloudStorageObjectUrl('bucket', null);
+    $expected_query = http_build_query([
+        'delimiter' => CloudStorageClient::DELIMITER,
+        'max-keys' => CloudStorageUrlStatClient::MAX_KEYS,
+        'prefix' => 'a/b',
+    ]);
+
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
+                             $request_headers,
+                             null,
+                             $response);
+
+    $this->assertTrue(is_dir('gs://bucket/a/b/'));
     $this->apiProxyMock->verify();
   }
 
   public function testStatObjectFailed() {
-    $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
-
-    $request_headers = [
-        "Authorization" => "OAuth foo token",
-        "x-goog-api-version" => 2,
-    ];
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+    $request_headers = $this->getStandardRequestHeaders();
     $response = [
         'status_code' => 404,
         'headers' => [
         ],
     ];
+    $expected_url = $this->makeCloudStorageObjectUrl("bucket", null);
+    $expected_query = http_build_query([
+        'delimiter' => CloudStorageClient::DELIMITER,
+        'max-keys' => CloudStorageUrlStatClient::MAX_KEYS,
+        'prefix' => 'object.png',
+    ]);
 
-    $expected_url = $this->makeCloudStorageObjectUrl();
-    $this->expectHttpRequest($expected_url,
-                             RequestMethod::HEAD,
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
                              $request_headers,
                              null,
                              $response);
@@ -352,10 +559,7 @@
     $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
 
     // First there is a stat
-    $request_headers = [
-        "Authorization" => "OAuth foo token",
-        "x-goog-api-version" => 2,
-    ];
+    $request_headers = $this->getStandardRequestHeaders();
     $response = [
         'status_code' => 200,
         'headers' => [
@@ -394,10 +598,7 @@
                              $response);
 
     // Then we unlink the original.
-    $request_headers = [
-        "Authorization" => "OAuth foo token",
-        "x-goog-api-version" => 2,
-    ];
+    $request_headers = $this->getStandardRequestHeaders();
     $response = [
         'status_code' => 204,
         'headers' => [
@@ -421,10 +622,7 @@
     $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
 
     // First there is a stat
-    $request_headers = [
-        "Authorization" => "OAuth foo token",
-        "x-goog-api-version" => 2,
-    ];
+    $request_headers = $this->getStandardRequestHeaders();
     $response = [
         'status_code' => 404,
         'headers' => [
@@ -450,10 +648,7 @@
     $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
 
     // First there is a stat
-    $request_headers = [
-        "Authorization" => "OAuth foo token",
-        "x-goog-api-version" => 2,
-    ];
+    $request_headers = $this->getStandardRequestHeaders();
     $response = [
         'status_code' => 200,
         'headers' => [
@@ -503,10 +698,7 @@
     $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
 
     // First there is a stat
-    $request_headers = [
-        "Authorization" => "OAuth foo token",
-        "x-goog-api-version" => 2,
-    ];
+    $request_headers = $this->getStandardRequestHeaders();
     $response = [
         'status_code' => 200,
         'headers' => [
@@ -546,10 +738,7 @@
                              $response);
 
     // Then we unlink the original.
-     $request_headers = [
-        "Authorization" => "OAuth foo token",
-        "x-goog-api-version" => 2,
-    ];
+    $request_headers = $this->getStandardRequestHeaders();
     $response = [
         'status_code' => 404,
         'headers' => [
@@ -571,35 +760,69 @@
   }
 
   public function testWriteObjectSuccess() {
-    $data_to_write = "Hello To PHP.";
-    $data_to_write_len = strlen($data_to_write);
+    $this->writeObjectSuccessWithMetadata("Hello To PHP.");
+  }
 
+  public function testWriteObjectWithMetadata() {
+    $metadata = ["foo" => "far", "bar" => "boo"];
+    $this->writeObjectSuccessWithMetadata("Goodbye To PHP.", $metadata);
+  }
+
+  private function writeObjectSuccessWithMetadata($data, $metadata = NULL) {
+    $data_len = strlen($data);
     $expected_url = $this->makeCloudStorageObjectUrl();
-
     $this->expectFileWriteStartRequest("text/plain",
                                        "public-read",
                                        "foo_upload_id",
-                                       $expected_url);
+                                       $expected_url,
+                                       $metadata);
 
     $this->expectFileWriteContentRequest($expected_url,
                                          "foo_upload_id",
-                                         $data_to_write,
+                                         $data,
                                          0,
-                                         $data_to_write_len - 1,
+                                         $data_len - 1,
                                          true);
-
-    stream_context_set_default([
+    $context = [
         "gs" => [
             "acl" => "public-read",
             "Content-Type" => "text/plain",
+            'enable_cache' => true,
         ],
-    ]);
-    $this->assertEquals($data_to_write_len,
-                        file_put_contents("gs://bucket/object.png",
-                                          $data_to_write));
+    ];
+    if (isset($metadata)) {
+      $context["gs"]["metadata"] = $metadata;
+    }
+
+    $range = sprintf("bytes=0-%d", CloudStorageClient::DEFAULT_READ_SIZE - 1);
+    $cache_key = sprintf(CloudStorageClient::MEMCACHE_KEY_FORMAT,
+                         $expected_url,
+                         $range);
+    $this->mock_memcached->expects($this->once())
+                         ->method('deleteMulti')
+                         ->with($this->identicalTo([$cache_key]));
+
+    stream_context_set_default($context);
+    $this->assertEquals($data_len,
+        file_put_contents("gs://bucket/object.png", $data));
     $this->apiProxyMock->verify();
   }
 
+  public function testWriteInvalidMetadata() {
+    $metadata = ["f o o" => "far"];
+    $context = [
+        "gs" => [
+            "acl" => "public-read",
+            "Content-Type" => "text/plain",
+            "metadata" => $metadata
+        ],
+    ];
+    stream_context_set_default($context);
+    $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
+    $this->setExpectedException("\PHPUnit_Framework_Error");
+    file_put_contents("gs://bucket/object.png", "Some data");
+  }
+
   public function testWriteLargeObjectSuccess() {
     $data_to_write = str_repeat("1234567890", 100000);
     $data_len = strlen($data_to_write);
@@ -638,8 +861,23 @@
         "gs" => [
             "acl" => "public-read",
             "Content-Type" => "text/plain",
+            'enable_cache' => true,
         ],
     ];
+
+    $delete_keys = [];
+    for ($i = 0; $i < $data_len; $i += CloudStorageClient::DEFAULT_READ_SIZE) {
+      $range = sprintf("bytes=%d-%d",
+                       $i,
+                       $i + CloudStorageClient::DEFAULT_READ_SIZE - 1);
+      $delete_keys[] = sprintf(CloudStorageClient::MEMCACHE_KEY_FORMAT,
+                               $expected_url,
+                               $range);
+    }
+    $this->mock_memcached->expects($this->once())
+                         ->method('deleteMulti')
+                         ->with($this->identicalTo($delete_keys));
+
     $ctx = stream_context_create($file_context);
     $this->assertEquals($data_len,
                         file_put_contents("gs://bucket/object.png",
@@ -706,7 +944,7 @@
                                  CloudStorageReadClient::DEFAULT_READ_SIZE,
                                  null);
 
-    CloudStorageStreamWrapperTest::$allowed_gs_bucket = "foo, bucket, bar";
+    define("GAE_INCLUDE_GS_BUCKETS", "foo, bucket, bar");
     $valid_path = "gs://bucket/object_name.png";
     require $valid_path;
 
@@ -721,6 +959,283 @@
     $this->assertFalse(fopen("gs://"));
   }
 
+  public function testReaddirSuccess() {
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+
+    $request_headers = $this->getStandardRequestHeaders();
+    $file_results = ['f/file1.txt', 'f/file2.txt', 'f/sub_$folder$'];
+    $response = [
+        'status_code' => 200,
+        'headers' => [
+        ],
+        'body' => $this->makeGetBucketXmlResponse("f/", $file_results),
+    ];
+    $expected_url = $this->makeCloudStorageObjectUrl("bucket", null);
+    $expected_query = http_build_query([
+        "delimiter" => CloudStorageDirectoryClient::DELIMITER,
+        "max-keys" => CloudStorageDirectoryClient::MAX_KEYS,
+        "prefix" => "f/",
+    ]);
+
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
+                             $request_headers,
+                             null,
+                             $response);
+
+    $res = opendir("gs://bucket/f");
+    $this->assertEquals("file1.txt", readdir($res));
+    $this->assertEquals("file2.txt", readdir($res));
+    $this->assertEquals("sub/", readdir($res));
+    $this->assertFalse(readdir($res));
+    closedir($res);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testReaddirTruncatedSuccess() {
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+    $request_headers = $this->getStandardRequestHeaders();
+    // First query with a truncated response
+    $response_body = "<?xml version='1.0' encoding='UTF-8'?>
+        <ListBucketResult xmlns='http://doc.s3.amazonaws.com/2006-03-01'>
+        <Name>sjl-test</Name>
+        <Prefix>f/</Prefix>
+        <Marker></Marker>
+        <NextMarker>AA</NextMarker>
+        <Delimiter>/</Delimiter>
+        <IsTruncated>true</IsTruncated>
+        <Contents>
+          <Key>f/file1.txt</Key>
+        </Contents>
+        <Contents>
+          <Key>f/file2.txt</Key>
+        </Contents>
+        </ListBucketResult>";
+    $response = [
+        'status_code' => 200,
+        'headers' => [
+        ],
+        'body' => $response_body,
+    ];
+    $expected_url = $this->makeCloudStorageObjectUrl("bucket", null);
+    $expected_query = http_build_query([
+        "delimiter" => CloudStorageDirectoryClient::DELIMITER,
+        "max-keys" => CloudStorageDirectoryClient::MAX_KEYS,
+        "prefix" => "f/",
+    ]);
+
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
+                             $request_headers,
+                             null,
+                             $response);
+
+    // Second query with the remaining response
+    $response_body = "<?xml version='1.0' encoding='UTF-8'?>
+        <ListBucketResult xmlns='http://doc.s3.amazonaws.com/2006-03-01'>
+        <Name>sjl-test</Name>
+        <Prefix>f/</Prefix>
+        <Marker>AA</Marker>
+        <Delimiter>/</Delimiter>
+        <IsTruncated>false</IsTruncated>
+        <Contents>
+          <Key>f/file3.txt</Key>
+        </Contents>
+        <Contents>
+          <Key>f/file4.txt</Key>
+        </Contents>
+        </ListBucketResult>";
+    $response = [
+        'status_code' => 200,
+        'headers' => [
+        ],
+        'body' => $response_body,
+    ];
+
+    $expected_query = http_build_query([
+        "delimiter" => CloudStorageDirectoryClient::DELIMITER,
+        "max-keys" => CloudStorageDirectoryClient::MAX_KEYS,
+        "prefix" => "f/",
+        "marker" => "AA",
+    ]);
+
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
+                             $request_headers,
+                             null,
+                             $response);
+
+    $res = opendir("gs://bucket/f");
+    $this->assertEquals("file1.txt", readdir($res));
+    $this->assertEquals("file2.txt", readdir($res));
+    $this->assertEquals("file3.txt", readdir($res));
+    $this->assertEquals("file4.txt", readdir($res));
+    $this->assertFalse(readdir($res));
+    closedir($res);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testRewindDirSuccess() {
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+    $request_headers = $this->getStandardRequestHeaders();
+    $response = [
+        'status_code' => 200,
+        'headers' => [
+        ],
+        'body' => $this->makeGetBucketXmlResponse(
+            "f/",
+            ["f/file1.txt", "f/file2.txt"]),
+    ];
+    $expected_url = $this->makeCloudStorageObjectUrl("bucket", null);
+    $expected_query = http_build_query([
+        "delimiter" => CloudStorageDirectoryClient::DELIMITER,
+        "max-keys" => CloudStorageDirectoryClient::MAX_KEYS,
+        "prefix" => "f/",
+    ]);
+
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
+                             $request_headers,
+                             null,
+                             $response);
+    // Expect the requests again when we rewinddir
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
+                             $request_headers,
+                             null,
+                             $response);
+
+    $res = opendir("gs://bucket/f");
+    $this->assertEquals("file1.txt", readdir($res));
+    rewinddir($res);
+    $this->assertEquals("file1.txt", readdir($res));
+    $this->assertEquals("file2.txt", readdir($res));
+    $this->assertFalse(readdir($res));
+    closedir($res);
+    $this->apiProxyMock->verify();
+  }
+
+  public function testInvalidPathForMkDir() {
+    $this->setExpectedException("\PHPUnit_Framework_Error");
+    $this->assertFalse(mkdir("gs://bucket_without_object/"));
+    $this->setExpectedException("\PHPUnit_Framework_Error");
+    $this->assertFalse(mkdir("gs://"));
+  }
+
+  public function testMkDirSuccess() {
+    $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
+    $request_headers = [
+        "Authorization" => "OAuth foo token",
+        "x-goog-if-generation-match" => 0,
+        "Content-Range" => "bytes */0",
+        "x-goog-api-version" => 2,
+    ];
+
+    $response = [
+        'status_code' => 200,
+        'headers' => [
+        ],
+    ];
+
+    $expected_url = $this->makeCloudStorageObjectUrl('bucket',
+                                                     '/dira/dirb_$folder$');
+    $this->expectHttpRequest($expected_url,
+                             RequestMethod::PUT,
+                             $request_headers,
+                             null,
+                             $response);
+
+    $this->assertTrue(mkdir("gs://bucket/dira/dirb"));
+    $this->apiProxyMock->verify();
+  }
+
+  public function testInvalidPathForRmDir() {
+    $this->setExpectedException("\PHPUnit_Framework_Error");
+    $this->assertFalse(rmdir("gs://bucket_without_object/"));
+    $this->setExpectedException("\PHPUnit_Framework_Error");
+    $this->assertFalse(rmdir("gs://"));
+  }
+
+  public function testRmDirSuccess() {
+    // Expect a request to list the contents of the bucket to ensure that it is
+    // empty.
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+    $request_headers = $this->getStandardRequestHeaders();
+    // Response listing an empty 'directory'.
+    $response = [
+        'status_code' => 200,
+        'headers' => [
+        ],
+        'body' => $this->makeGetBucketXmlResponse("dira/dirb/", []),
+    ];
+    $expected_url = $this->makeCloudStorageObjectUrl("bucket", null);
+    $expected_query = http_build_query([
+        "delimiter" => CloudStorageDirectoryClient::DELIMITER,
+        "max-keys" => CloudStorageDirectoryClient::MAX_KEYS,
+        "prefix" => "dira/dirb/",
+    ]);
+
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
+                             $request_headers,
+                             null,
+                             $response);
+
+    // Expect the unlink request for the folder.
+    $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
+    $request_headers = $this->getStandardRequestHeaders();
+    $response = [
+        'status_code' => 204,
+        'headers' => [
+        ],
+    ];
+
+    $expected_url = $this->makeCloudStorageObjectUrl('bucket',
+                                                     '/dira/dirb_$folder$');
+    $this->expectHttpRequest($expected_url,
+                             RequestMethod::DELETE,
+                             $request_headers,
+                             null,
+                             $response);
+
+    $this->assertTrue(rmdir("gs://bucket/dira/dirb"));
+    $this->apiProxyMock->verify();
+  }
+
+  public function testRmDirNotEmpty() {
+    // Expect a request to list the contents of the bucket to ensure that it is
+    // empty.
+    $this->expectGetAccessTokenRequest(CloudStorageClient::READ_SCOPE);
+    $request_headers = $this->getStandardRequestHeaders();
+    // Response listing a non-empty 'directory'.
+    $response = [
+        'status_code' => 200,
+        'headers' => [
+        ],
+        'body' => $this->makeGetBucketXmlResponse(
+            "dira/dirb/",
+            ["dira/dirb/file1.txt"]),
+    ];
+    $expected_url = $this->makeCloudStorageObjectUrl("bucket", null);
+    $expected_query = http_build_query([
+        "delimiter" => CloudStorageDirectoryClient::DELIMITER,
+        "max-keys" => CloudStorageDirectoryClient::MAX_KEYS,
+        "prefix" => "dira/dirb/",
+    ]);
+
+    $this->expectHttpRequest(sprintf("%s?%s", $expected_url, $expected_query),
+                             RequestMethod::GET,
+                             $request_headers,
+                             null,
+                             $response);
+
+    $this->setExpectedException("\PHPUnit_Framework_Error");
+    $this->assertFalse(rmdir("gs://bucket/dira/dirb"));
+    $this->apiProxyMock->verify();
+  }
+
   private function expectFileReadRequest($body,
                                          $start_byte,
                                          $length,
@@ -778,6 +1293,19 @@
                                     'GetAccessToken',
                                     $req,
                                     $resp);
+
+    $this->mock_memcache->expects($this->at($this->mock_memcache_call_index++))
+                        ->method('get')
+                        ->with($this->stringStartsWith('_ah_app_identity'))
+                        ->will($this->returnValue(false));
+
+    $this->mock_memcache->expects($this->at($this->mock_memcache_call_index++))
+                        ->method('set')
+                        ->with($this->stringStartsWith('_ah_app_identity'),
+                               $this->anything(),
+                               $this->anything(),
+                               $this->anything())
+                        ->will($this->returnValue(false));
   }
 
   private function createSuccessfulGetHttpResponse($headers,
@@ -827,7 +1355,11 @@
     ];
   }
 
-  private function expectFileWriteStartRequest($content_type, $acl, $id, $url) {
+  private function expectFileWriteStartRequest($content_type,
+                                               $acl,
+                                               $id,
+                                               $url,
+                                               $metadata = NULL) {
     $this->expectGetAccessTokenRequest(CloudStorageClient::WRITE_SCOPE);
     $upload_id =  "https://host/bucket/object.png?upload_id=" . $id;
     // The upload will start with a POST to acquire the upload ID.
@@ -836,8 +1368,13 @@
         "Authorization" => "OAuth foo token",
         "Content-Type" => $content_type,
         "x-goog-acl" => $acl,
-        "x-goog-api-version" => 2,
     ];
+    if (isset($metadata)) {
+      foreach ($metadata as $key => $value) {
+        $request_headers["x-goog-meta-" . $key] = $value;
+      }
+    }
+    $request_headers["x-goog-api-version"] = 2;
     $response = [
         'status_code' => 201,
         'headers' => [
@@ -942,6 +1479,42 @@
                      $bucket);
     }
   }
+
+  private function getStandardRequestHeaders() {
+    return [
+        "Authorization" => "OAuth foo token",
+        "x-goog-api-version" => 2,
+    ];
+  }
+
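+  /**
+   * Build the XML body of a GET bucket (ListBucketResult) response. Each
+   * entry in $contents_array is either a key name string, or an array with
+   * 'key', 'size' and 'mtime' values.
+   */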
+  private function makeGetBucketXmlResponse($prefix,
+                                            $contents_array,
+                                            $next_marker = null) {
+    $result = "<?xml version='1.0' encoding='UTF-8'?>
+        <ListBucketResult xmlns='http://doc.s3.amazonaws.com/2006-03-01'>
+        <Name>sjl-test</Name>
+        <Prefix>" . $prefix . "</Prefix>
+        <Marker></Marker>";
+    if (isset($next_marker)) {
+      $result .= "<NextMarker>" . $next_marker . "</NextMarker>";
+    }
+    $result .= "<Delimiter>/</Delimiter>
+        <IsTruncated>false</IsTruncated>";
+
+    foreach ($contents_array as $content) {
+      $result .= '<Contents>';
+      if (is_string($content)) {
+        $result .= '<Key>' . $content . '</Key>';
+      } else {
+        $result .= '<Key>' . $content['key'] . '</Key>';
+        $result .= '<Size>' . $content['size'] . '</Size>';
+        $result .= '<LastModified>' . $content['mtime'] . '</LastModified>';
+      }
+      $result .= '</Contents>';
+    }
+    $result .= "</ListBucketResult>";
+    return $result;
+  }
 }
 
 }  // namespace google\appengine\ext\cloud_storage_streams;
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageUrlStatClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageUrlStatClient.php
index ccbafaa..dae2769 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageUrlStatClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageUrlStatClient.php
@@ -22,20 +22,116 @@
 namespace google\appengine\ext\cloud_storage_streams;
 
 require_once 'google/appengine/ext/cloud_storage_streams/CloudStorageClient.php';
+require_once 'google/appengine/util/string_util.php';
+
+use \google\appengine\util as util;
 
 /**
- * Client for deleting objects from Google Cloud Storage.
+ * Client for retrieving stat() information for Google Cloud Storage objects.
  */
 final class CloudStorageUrlStatClient extends CloudStorageClient {
+  // Maximum number of keys to return when querying a bucket.
+  const MAX_KEYS = 1000;
+
   private $quiet;
+  private $prefix = null;
+  private $next_marker = null;
 
   public function __construct($bucket, $object, $context, $flags) {
     parent::__construct($bucket, $object, $context);
     $this->quiet = ($flags & STREAM_URL_STAT_QUIET) == STREAM_URL_STAT_QUIET;
+    if (isset($object)) {
+      // Drop the leading '/' from the object name.
+      $this->prefix = substr($object, 1);
+    }
   }
 
+  /**
+   * The stat function uses GET requests on the bucket to determine whether
+   * the object is a 'file' or a 'directory': it lists the contents of the
+   * bucket and then matches the results against the supplied object name.
+   *
+   * If an object name ends with "_$folder$" then the Google Cloud Storage
+   * Manager shows it as a 'folder' in the UI, so we also treat an object
+   * that ends in "_$folder$" as a directory.
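+   *
+   * Illustrative usage (assuming a hypothetical bucket "my_bucket"):
+   *   is_file("gs://my_bucket/a/b.txt")  // true if object "a/b.txt" exists
+   *   is_dir("gs://my_bucket/a")         // true if an "a/..." object or an
+   *                                      // "a_$folder$" placeholder exists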
+   */
   public function stat() {
-    $headers = $this->getOAuthTokenHeader(parent::WRITE_SCOPE);
+    $prefix = $this->prefix;
+    if (util\endsWith($prefix, parent::DELIMITER)) {
+      $prefix = substr($prefix, 0, strlen($prefix) - 1);
+    }
+
+    if (isset($prefix)) {
+      while (!isset($mode)) {
+        $results = $this->makeRequest($prefix);
+        if (false === $results) {
+          return false;
+        }
+        // If there are no results then we're done
+        if (empty($results)) {
+          return false;
+        }
+        // If an entry in $results matches the object name exactly then we
+        // have a file. If an entry matches object_name_$folder$ or
+        // object_name/ then we have a 'directory'.
+        $object_name_folder = $prefix . parent::FOLDER_SUFFIX;
+        $object_name_delimiter = $prefix . parent::DELIMITER;
+        foreach ($results as $result) {
+          if ($result['name'] === $prefix) {
+            $mode = parent::S_IFREG;
+            $mtime = $result['mtime'];
+            $size = $result['size'];
+            break;
+          } else if ($result['name'] === $object_name_folder ||
+                     strncmp($result['name'],
+                             $object_name_delimiter,
+                             strlen($object_name_delimiter)) == 0) {
+            $mode = parent::S_IFDIR;
+            break;
+          }
+        }
+        // If nothing matched and there are no more pages of results, stop;
+        // otherwise the next iteration would repeat the same request forever.
+        if (!isset($mode) && !isset($this->next_marker)) {
+          break;
+        }
+      }
+    } else {
+      // We are now just checking that the bucket exists, as there was no
+      // object prefix supplied
+      $results = $this->makeRequest();
+      if ($results !== false) {
+        $mode = parent::S_IFDIR;
+      } else {
+        return false;
+      }
+    }
+    // If mode is not set, then there was no object that matched the criteria.
+    if (!isset($mode)) {
+      return false;
+    }
+    // If the app could stat the file, then it must be readable. As different
+    // PHP internal APIs check the access mode, we'll set them all to readable.
+    $mode |= parent::S_IRUSR | parent::S_IRGRP | parent::S_IROTH;
+
+    $stat_args["mode"] = $mode;
+    if (isset($mtime)) {
+      $unix_time = strtotime($mtime);
+      if ($unix_time !== false) {
+        $stat_args["mtime"] = $unix_time;
+      }
+    }
+
+    if (isset($size)) {
+      $stat_args["size"] = intval($size);
+    }
+    return $this->createStatArray($stat_args);
+  }
+
+  /**
+   * Perform a GET request on the bucket, with the optional $object_prefix.
+   * This is similar to how CloudStorageDirectoryClient works, except that it
+   * targets a specific file rather than enumerating all of the files in the
+   * bucket that share a common prefix.
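+   *
+   * For example, stat'ing "gs://my_bucket/a/b" (hypothetical names) issues a
+   * request of the form:
+   *   GET <bucket url>?delimiter=%2F&max-keys=1000&prefix=a%2Fb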
+   */
+  private function makeRequest($object_prefix = null) {
+    $headers = $this->getOAuthTokenHeader(parent::READ_SCOPE);
     if ($headers === false) {
       if (!$this->quiet) {
         trigger_error("Unable to acquire OAuth token.", E_USER_WARNING);
@@ -43,46 +139,57 @@
       return false;
     }
 
-    $http_response = $this->makeHttpRequest($this->url, "HEAD", $headers);
+    $query_arr = [
+        'delimiter' => parent::DELIMITER,
+        'max-keys' => self::MAX_KEYS,
+    ];
+    if (isset($object_prefix)) {
+      $query_arr['prefix'] = $object_prefix;
+    }
+    if (isset($this->next_marker)) {
+      $query_arr['marker'] = $this->next_marker;
+    }
+
+    $url = $this->createObjectUrl($this->bucket_name);
+    $query_str = http_build_query($query_arr);
+    $http_response = $this->makeHttpRequest(sprintf("%s?%s", $url, $query_str),
+                                            "GET",
+                                            $headers);
     if ($http_response === false) {
       if (!$this->quiet) {
+        trigger_error('Unable to connect to the Cloud Storage Service.',
+                      E_USER_WARNING);
+      }
+      return false;
+    }
+
+    if (HttpResponse::OK !== $http_response['status_code']) {
+      if (!$this->quiet) {
         trigger_error($this->getErrorMessage($http_response['status_code'],
                                              $http_response['body']),
                       E_USER_WARNING);
       }
       return false;
     }
-    $status_code = $http_response['status_code'];
 
-    // TODO: Implement retry.
-    if ($status_code != HttpResponse::OK) {
-      return false;
+    // Extract the files into the result array.
+    $xml = simplexml_load_string($http_response['body']);
+
+    if (isset($xml->NextMarker)) {
+      $this->next_marker = (string) $xml->NextMarker;
+    } else {
+      $this->next_marker = null;
     }
 
-    $mode = isset($this->object_name) ? parent::S_IFREG : parent::S_IFDIR;
-
-    // If the app could stat the file, then it must be readable.
-    $mode |= parent::S_IRUSR;
-
-    $stat_args["mode"] = $mode;
-
-    $last_modified = $this->getHeaderValue('Last-Modified',
-                                           $http_response['headers']);
-    if (isset($last_modified)) {
-      $unix_time = strtotime($last_modified);
-      if ($unix_time !== false) {
-        $stat_args["mtime"] = $unix_time;
-      }
+    $results = [];
+    foreach ($xml->Contents as $content) {
+      $results[] = [
+          'name' => (string) $content->Key,
+          'size' => (string) $content->Size,
+          'mtime' => (string) $content->LastModified,
+      ];
     }
 
-    $content_length = $this->getHeaderValue('Content-Length',
-                                            $http_response['headers']);
-
-    if (isset($content_length)) {
-      $stat_args["size"] = intval($content_length);
-    }
-
-    return $this->createStatArray($stat_args);
+    return $results;
   }
 }
-
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php
index 64f6c1b..e070b74 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageWriteClient.php
@@ -31,14 +31,12 @@
   // the last chunk.
   const WRITE_CHUNK_SIZE = 262144;
 
-  // Content Range Header format when the total length is unknown.
-  const PARTIAL_CONTENT_RANGE_FORMAT = "bytes %d-%d/*";
+  // Conservative pattern for metadata header names; could be relaxed.
+  const METADATA_KEY_REGEX = "/^[[:alnum:]-]+$/";
 
-  // Content Range Header format when the length is known.
-  const FINAL_CONTENT_RANGE_FORMAT = "bytes %d-%d/%d";
-
-  // Content Range Header for final chunk with no new data
-  const FINAL_CONTENT_RANGE_NO_DATA = "bytes */%d";
+  // Metadata header values must be printable US-ASCII.
+  // http://tools.ietf.org/html/rfc2616#section-4.2
+  const METADATA_VALUE_REGEX = "/^[[:print:]]*$/";
 
   private static $upload_start_header = ["x-goog-resumable" => "start"];
 
@@ -88,6 +86,23 @@
       }
     }
 
+    if (array_key_exists("metadata", $this->context_options)) {
+      $metadata = $this->context_options["metadata"];
+      foreach ($metadata as $name => $value) {
+        if (!preg_match(self::METADATA_KEY_REGEX, $name)) {
+          trigger_error(sprintf("Invalid metadata key: %s", $name),
+              E_USER_WARNING);
+          return false;
+        }
+        if (!preg_match(self::METADATA_VALUE_REGEX, $value)) {
+          trigger_error(sprintf("Invalid metadata value: %s", $value),
+              E_USER_WARNING);
+          return false;
+        }
+        $headers['x-goog-meta-' . $name] = $value;
+      }
+    }
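+    // Illustrative caller usage (hypothetical names): a context created with
+    //   stream_context_create(["gs" => ["metadata" => ["author" => "me"]]])
+    // results in the object being stored with the header
+    //   "x-goog-meta-author: me".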
+
     $http_response = $this->makeHttpRequest($this->url,
                                             "POST",
                                             $headers);
@@ -201,16 +216,16 @@
     if ($complete) {
       $object_length = $this->buffer_start_offset + $write_size;
       if ($write_size === 0) {
-        $headers['Content-Range'] = sprintf(self::FINAL_CONTENT_RANGE_NO_DATA,
+        $headers['Content-Range'] = sprintf(parent::FINAL_CONTENT_RANGE_NO_DATA,
                                             $object_length);
       } else {
-        $headers['Content-Range'] = sprintf(self::FINAL_CONTENT_RANGE_FORMAT,
+        $headers['Content-Range'] = sprintf(parent::FINAL_CONTENT_RANGE_FORMAT,
                                             $this->buffer_start_offset,
                                             $write_end_byte,
                                             $object_length);
       }
     } else {
-      $headers['Content-Range'] = sprintf(self::PARTIAL_CONTENT_RANGE_FORMAT,
+      $headers['Content-Range'] = sprintf(parent::PARTIAL_CONTENT_RANGE_FORMAT,
                                           $this->buffer_start_offset,
                                           $write_end_byte);
     }
@@ -232,6 +247,24 @@
       $this->buffer_start_offset = $write_end_byte + 1;
       $this->byte_buffer = substr($this->byte_buffer, $write_size);
     }
+    // Invalidate any cached object with the same name. Note that there is a
+    // potential race condition when combining optimistic caching with
+    // invalidation on write: a stale version of the object may still be
+    // served from the cache.
+    if ($complete && isset($this->context_options['enable_cache']) &&
+        $this->context_options['enable_cache'] === true) {
+      if ($object_length > 0) {
+        $key_names = [];
+        for ($i = 0; $i < $object_length; $i += parent::DEFAULT_READ_SIZE) {
+          $range = $this->getRangeHeader($i,
+                                         $i + parent::DEFAULT_READ_SIZE - 1);
+          $key_names[] = sprintf(parent::MEMCACHE_KEY_FORMAT,
+                                 $this->url,
+                                 $range['Range']);
+        }
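+        // Each key names one DEFAULT_READ_SIZE-aligned byte range of this
+        // URL that the read client may have cached.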
+        $memcached = new \Memcached();
+        $memcached->deleteMulti($key_names);
+      }
+    }
     return true;
   }
 }
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/HttpResponse.php b/php/sdk/google/appengine/ext/cloud_storage_streams/HttpResponse.php
index 6e253fa..c6dcedd 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/HttpResponse.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/HttpResponse.php
@@ -30,6 +30,7 @@
   const NO_CONTENT = 204;
   const PARTIAL_CONTENT = 206;
 
+  const NOT_MODIFIED = 304;
   const RESUME_INCOMPLETE = 308;
 
   const BAD_REQUEST = 400;
@@ -50,6 +51,7 @@
     self::CREATED => "CREATED",
     self::NO_CONTENT => "NO CONTENT",
     self::PARTIAL_CONTENT => "PARTIAL CONTENT",
+    self::NOT_MODIFIED => "NOT MODIFIED",
     self::RESUME_INCOMPLETE => "RESUME INCOMPLETE",
     self::BAD_REQUEST => "BAD REQUEST",
     self::UNAUTHORIZED => "UNAUTHORIZED",
diff --git a/php/sdk/google/appengine/util/string_util.php b/php/sdk/google/appengine/util/string_util.php
index 05110ae..73fe24d 100644
--- a/php/sdk/google/appengine/util/string_util.php
+++ b/php/sdk/google/appengine/util/string_util.php
@@ -21,13 +21,25 @@
 namespace google\appengine\util;
 
 /**
- * Return true if the first paramater contains the second parameter at the end
+ * Return true if the first parameter contains the second parameter at the end.
  *
- * @param string $input The input string which may contain the suffix
- * @param string $suffix The string to look for at the end of the input
+ * @param string $input The input string which may contain the suffix.
+ * @param string $suffix The string to look for at the end of the input.
  *
- * @return true if the input contains the suffix at the end or false otherwise
+ * @return boolean <code>true</code> iff the input contains the suffix at the
+ * end.
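+ *
+ * For example, endsWith("dira/dirb/", "/") returns true.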
  */
 function endsWith($input, $suffix) {
   return substr($input, -strlen($suffix)) === $suffix;
+}
+
+/**
+ * @param string $input The string which may contain the prefix at the start.
+ * @param string $prefix The string to look for at the start of the input.
+ *
+ * @return boolean <code>true</code> iff the input contains the prefix at the
+ * start.
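+ *
+ * For example, startsWith("gs://bucket/object", "gs://") returns true.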
+ */
+function startsWith($input, $prefix) {
+  return substr($input, 0, strlen($prefix)) === $prefix;
 }
\ No newline at end of file
diff --git a/uritemplate/__init__.py b/uritemplate/__init__.py
new file mode 100644
index 0000000..046cf61
--- /dev/null
+++ b/uritemplate/__init__.py
@@ -0,0 +1,14 @@
+#!/usr/bin/python2.4
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+
+"""Module definition for uritemplate.
+
+   This differs from the upstream version (which is an empty file),
+   in order to expose the 'expand' method from the module
+   the same way that it is exposed by the upstream version.
+"""
+
+__author__ = 'ojarjur@google.com (Omar Jarjur)'
+
+from uritemplate import expand
diff --git a/uritemplate/uritemplate.py b/uritemplate/uritemplate.py
new file mode 100644
index 0000000..c681049
--- /dev/null
+++ b/uritemplate/uritemplate.py
@@ -0,0 +1,177 @@
+# Early, incomplete implementation of draft 04 of the URI Template spec.
+#
+import re
+import urllib
+
+RESERVED = ":/?#[]@!$&'()*+,;="
+OPERATOR = "+./;?|!@"
+EXPLODE = "*+"
+MODIFIER = ":^"
+TEMPLATE = re.compile(r"{([^}]+)}")
+
+def _tostring(varname, value, explode, operator, safe=""):
+  if type(value) == type([]):
+    if explode == "+":
+      return ",".join([varname + "." + urllib.quote(x, safe) for x in value])
+    else:
+      return ",".join([urllib.quote(x, safe) for x in value])
+  if type(value) == type({}):
+    keys = value.keys()
+    keys.sort()
+    if explode == "+":
+      return ",".join([varname + "." + urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
+    else:
+      return ",".join([urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
+  else:
+    return urllib.quote(value, safe)
+
+
+def _tostring_path(varname, value, explode, operator, safe=""):
+  joiner = operator
+  if type(value) == type([]):
+    if explode == "+":
+      return joiner.join([varname + "." + urllib.quote(x, safe) for x in value])
+    elif explode == "*":
+      return joiner.join([urllib.quote(x, safe) for x in value])
+    else:
+      return ",".join([urllib.quote(x, safe) for x in value])
+  elif type(value) == type({}):
+    keys = value.keys()
+    keys.sort()
+    if explode == "+":
+      return joiner.join([varname + "." + urllib.quote(key, safe) + joiner + urllib.quote(value[key], safe) for key in keys])
+    elif explode == "*":
+      return joiner.join([urllib.quote(key, safe) + joiner + urllib.quote(value[key], safe) for key in keys])
+    else:
+      return ",".join([urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
+  else:
+    if value:
+      return urllib.quote(value, safe)
+    else:
+      return ""
+
+
+def _tostring_semi(varname, value, explode, operator, safe=""):
+  joiner = operator
+  if operator == "?":
+    joiner = "&"
+  if type(value) == type([]):
+    if explode == "+":
+      return joiner.join([varname + "=" + urllib.quote(x, safe) for x in value])
+    elif explode == "*":
+      return joiner.join([urllib.quote(x, safe) for x in value])
+    else:
+      return ",".join([urllib.quote(x, safe) for x in value])
+  elif type(value) == type({}):
+    keys = value.keys()
+    keys.sort()
+    if explode == "+":
+      return joiner.join([varname + "." + urllib.quote(key, safe) + "=" + urllib.quote(value[key], safe) for key in keys])
+    elif explode == "*":
+      return joiner.join([urllib.quote(key, safe) + "=" + urllib.quote(value[key], safe) for key in keys])
+    else:
+      return ",".join([urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
+  else:
+    if value:
+      return varname + "=" + urllib.quote(value, safe)
+    else:
+      return varname
+
+
+def _tostring_query(varname, value, explode, operator, safe=""):
+  joiner = operator
+  if operator == "?":
+    joiner = "&"
+  if type(value) == type([]):
+    if 0 == len(value):
+      return ""
+    if explode == "+":
+      return joiner.join([varname + "=" + urllib.quote(x, safe) for x in value])
+    elif explode == "*":
+      return joiner.join([urllib.quote(x, safe) for x in value])
+    else:
+      return varname + "=" + ",".join([urllib.quote(x, safe) for x in value])
+  elif type(value) == type({}):
+    if 0 == len(value):
+      return ""
+    keys = value.keys()
+    keys.sort()
+    if explode == "+":
+      return joiner.join([varname + "." + urllib.quote(key, safe) + "=" + urllib.quote(value[key], safe) for key in keys])
+    elif explode == "*":
+      return joiner.join([urllib.quote(key, safe) + "=" + urllib.quote(value[key], safe) for key in keys])
+    else:
+      return varname + "=" + ",".join([urllib.quote(key, safe) + "," + urllib.quote(value[key], safe) for key in keys])
+  else:
+    if value:
+      return varname + "=" + urllib.quote(value, safe)
+    else:
+      return varname
+
+TOSTRING = {
+    "" : _tostring,
+    "+": _tostring,
+    ";": _tostring_semi,
+    "?": _tostring_query,
+    "/": _tostring_path,
+    ".": _tostring_path,
+    }
+
+
+def expand(template, vars):
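+  """Expand a URI template (draft-04 style) with the given variables.
+
+  Values may be strings, lists or dicts. Variables missing from 'vars' are
+  skipped unless the template provides a default ("{name=default}").
+  """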
+  def _sub(match):
+    expression = match.group(1)
+    operator = ""
+    if expression[0] in OPERATOR:
+      operator = expression[0] 
+      varlist = expression[1:]
+    else:
+      varlist = expression
+
+    safe = ""
+    explode = ""
+    if operator == '+':
+      safe = RESERVED
+    varspecs = varlist.split(",")
+    varnames = []
+    defaults = {}
+    for varspec in varspecs:
+      default = None
+      if "=" in varspec:
+        varname, default = tuple(varspec.split("=", 1))
+      else:
+        varname = varspec
+      if varname[-1] in EXPLODE:
+        explode = varname[-1]
+        varname = varname[:-1]
+      if default:
+        defaults[varname] = default
+      varnames.append((varname, explode))
+
+    retval = []
+    joiner = operator
+    prefix = operator
+    if operator == "+":
+      prefix = ""
+      joiner = ","
+    if operator == "?":
+      joiner = "&"
+    if operator == "":
+      joiner = ","
+    for varname, explode in varnames:
+      if varname in vars:
+        value = vars[varname]
+        #if not value and (type(value) == type({}) or type(value) == type([])) and varname in defaults:
+        if not value and value != "" and varname in defaults:
+          value = defaults[varname]
+      elif varname in defaults:
+        value = defaults[varname]
+      else:
+        continue
+      retval.append(TOSTRING[operator](varname, value, explode, operator, safe=safe))
+    if "".join(retval):
+      return prefix + joiner.join(retval)
+    else:
+      return ""
+
+  return TEMPLATE.sub(_sub, template)
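+
+# Illustrative usage of expand(), per this implementation:
+#   expand("http://example.com/{id}", {"id": "42"})
+#     == "http://example.com/42"
+#   expand("/search{?q,lang}", {"q": "cat", "lang": "en"})
+#     == "/search?q=cat&lang=en"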