git-svn-id: http://googleappengine.googlecode.com/svn/trunk/python@199 80f5ef21-4148-0410-bacc-cfb02402ada8
diff --git a/RELEASE_NOTES b/RELEASE_NOTES
index 3ada141..b07d739 100644
--- a/RELEASE_NOTES
+++ b/RELEASE_NOTES
@@ -3,6 +3,37 @@
App Engine Python SDK - Release Notes
+Version 1.5.4
+===============================
+- You can now specify the maximum size for a blob in create_upload_url().
+- Zigzag merge join queries will now continue scanning up to the 30 second
+ Datastore query deadline. For zigzag queries that used to generate NeedIndex
+ errors, many will now succeed. A small percentage will now instead time out.
+- The SDK datastore viewer in the dev console now displays the number of "Write
+ Ops" for each entity. "Write Ops" are the total number of entity and index
+ writes that were required to create the entity.
+- Added API functionality for making calls to the Memcache API asynchronously.
+- Fixed an issue that incorrectly allowed creation of tasks with whitespace in
+ the URL.
+- Fixed the error message for "transaction not found" to be more descriptive.
+- Fixed an issue where blobstore uploads didn't work in the SDK with the
+ -a 0.0.0.0 flag set.
+- Fixed an issue where --dry_run was broken for upload_data in bulkload.py.
+- Fixed an issue where db.Model().to_xml() incorrectly updated auto-updating
+ properties.
+ http://code.google.com/p/googleappengine/issues/detail?id=322
+- Fixed an issue where the SDK didn't expand the '~' in a file path.
+ http://code.google.com/p/googleappengine/issues/detail?id=522
+- Fixed an issue where is_saved() wasn't valid after db.Model.__init__.
+ http://code.google.com/p/googleappengine/issues/detail?id=844
+- Fixed an issue where GQL IN queries with an empty list returned all entities.
+ http://code.google.com/p/googleappengine/issues/detail?id=932
+- Fixed an issue where the SDK's sqlite stub did not handle cursors on
+ descending queries correctly.
+ http://code.google.com/p/googleappengine/issues/detail?id=3121
+- Fixed a typo in the SDK's Datastore Stats generator message.
+ http://code.google.com/p/googleappengine/issues/detail?id=5425
+
Version 1.5.3
=============================
- We've removed the limit on the size of blob uploads using the Blobstore API.
diff --git a/VERSION b/VERSION
index 862b270..14de515 100644
--- a/VERSION
+++ b/VERSION
@@ -1,3 +1,3 @@
-release: "1.5.3"
-timestamp: 1311108376
+release: "1.5.4"
+timestamp: 1313527828
api_versions: ['1']
diff --git a/appcfg.py b/appcfg.py
index 8f18e20..e673b9c 100755
--- a/appcfg.py
+++ b/appcfg.py
@@ -50,7 +50,7 @@
os.path.join(DIR_PATH, 'lib', 'whoosh'),
os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
os.path.join(DIR_PATH, 'lib', 'simplejson'),
- os.path.join(DIR_PATH, 'lib', 'graphy'),
+ os.path.join(DIR_PATH, 'lib', 'google.appengine._internal.graphy'),
]
diff --git a/bulkload_client.py b/bulkload_client.py
index 8f18e20..e673b9c 100755
--- a/bulkload_client.py
+++ b/bulkload_client.py
@@ -50,7 +50,7 @@
os.path.join(DIR_PATH, 'lib', 'whoosh'),
os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
os.path.join(DIR_PATH, 'lib', 'simplejson'),
- os.path.join(DIR_PATH, 'lib', 'graphy'),
+ os.path.join(DIR_PATH, 'lib', 'google.appengine._internal.graphy'),
]
diff --git a/bulkloader.py b/bulkloader.py
index 8f18e20..e673b9c 100755
--- a/bulkloader.py
+++ b/bulkloader.py
@@ -50,7 +50,7 @@
os.path.join(DIR_PATH, 'lib', 'whoosh'),
os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
os.path.join(DIR_PATH, 'lib', 'simplejson'),
- os.path.join(DIR_PATH, 'lib', 'graphy'),
+ os.path.join(DIR_PATH, 'lib', 'google.appengine._internal.graphy'),
]
diff --git a/dev_appserver.py b/dev_appserver.py
index 8f18e20..e673b9c 100755
--- a/dev_appserver.py
+++ b/dev_appserver.py
@@ -50,7 +50,7 @@
os.path.join(DIR_PATH, 'lib', 'whoosh'),
os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
os.path.join(DIR_PATH, 'lib', 'simplejson'),
- os.path.join(DIR_PATH, 'lib', 'graphy'),
+ os.path.join(DIR_PATH, 'lib', 'google.appengine._internal.graphy'),
]
diff --git a/gen_protorpc.py b/gen_protorpc.py
index 8f18e20..e673b9c 100755
--- a/gen_protorpc.py
+++ b/gen_protorpc.py
@@ -50,7 +50,7 @@
os.path.join(DIR_PATH, 'lib', 'whoosh'),
os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
os.path.join(DIR_PATH, 'lib', 'simplejson'),
- os.path.join(DIR_PATH, 'lib', 'graphy'),
+ os.path.join(DIR_PATH, 'lib', 'google.appengine._internal.graphy'),
]
diff --git a/google/appengine/_internal/__init__.py b/google/appengine/_internal/__init__.py
new file mode 100644
index 0000000..c33ae80
--- /dev/null
+++ b/google/appengine/_internal/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google/appengine/_internal/django/__init__.py b/google/appengine/_internal/django/__init__.py
new file mode 100644
index 0000000..75f3ca2
--- /dev/null
+++ b/google/appengine/_internal/django/__init__.py
@@ -0,0 +1,16 @@
+VERSION = (1, 2, 5, 'final', 0)
+
+def get_version():
+ version = '%s.%s' % (VERSION[0], VERSION[1])
+ if VERSION[2]:
+ version = '%s.%s' % (version, VERSION[2])
+ if VERSION[3:] == ('alpha', 0):
+ version = '%s pre-alpha' % version
+ else:
+ if VERSION[3] != 'final':
+ version = '%s %s %s' % (version, VERSION[3], VERSION[4])
+ from google.appengine._internal.django.utils.version import get_svn_revision
+ svn_rev = get_svn_revision()
+ if svn_rev != u'SVN-unknown':
+ version = "%s %s" % (version, svn_rev)
+ return version
diff --git a/google/appengine/_internal/django/conf/__init__.py b/google/appengine/_internal/django/conf/__init__.py
new file mode 100644
index 0000000..9cdcbfb
--- /dev/null
+++ b/google/appengine/_internal/django/conf/__init__.py
@@ -0,0 +1,120 @@
+"""
+Settings and configuration for Django.
+
+Values will be read from the module specified by the DJANGO_SETTINGS_MODULE environment
+variable, and then from google.appengine._internal.django.conf.global_settings; see the global settings file for
+a list of all possible variables.
+"""
+
+import os
+import re
+import threading
+import time # Needed for Windows
+
+from google.appengine._internal.django.conf import global_settings
+from google.appengine._internal.django.utils.functional import LazyObject
+from google.appengine._internal.django.utils import importlib
+
+ENVIRONMENT_VARIABLE = "DJANGO_SETTINGS_MODULE"
+
+class LazySettings(threading.local):
+ """
+ A lazy proxy for either global Django settings or a custom settings object.
+ The user can manually configure settings prior to using them. Otherwise,
+ Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
+ """
+
+ def __init__(self):
+ self.holder = None
+
+ def __getattr__(self, name):
+ assert self.holder, 'settings has not been configured in this thread'
+ return getattr(self.holder, name)
+
+ def configure(self, default_settings=global_settings, **options):
+ """
+ Called to manually configure the settings. The 'default_settings'
+ parameter sets where to retrieve any unspecified values from (its
+ argument must support attribute access (__getattr__)).
+ """
+ self.holder = UserSettingsHolder(default_settings)
+ for name, value in options.items():
+ setattr(self.holder, name, value)
+
+ configured = True
+
+class Settings(object):
+ def __init__(self, settings_module):
+ # update this dict from global settings (but only for ALL_CAPS settings)
+ for setting in dir(global_settings):
+ if setting == setting.upper():
+ setattr(self, setting, getattr(global_settings, setting))
+
+ # store the settings module in case someone later cares
+ self.SETTINGS_MODULE = settings_module
+
+ try:
+ mod = importlib.import_module(self.SETTINGS_MODULE)
+ except ImportError, e:
+ raise ImportError("Could not import settings '%s' (Is it on sys.path? Does it have syntax errors?): %s" % (self.SETTINGS_MODULE, e))
+
+ # Settings that should be converted into tuples if they're mistakenly entered
+ # as strings.
+ tuple_settings = ("INSTALLED_APPS", "TEMPLATE_DIRS")
+
+ for setting in dir(mod):
+ if setting == setting.upper():
+ setting_value = getattr(mod, setting)
+ if setting in tuple_settings and type(setting_value) == str:
+ setting_value = (setting_value,) # In case the user forgot the comma.
+ setattr(self, setting, setting_value)
+
+ # Expand entries in INSTALLED_APPS like "django.contrib.*" to a list
+ # of all those apps.
+ new_installed_apps = []
+ for app in self.INSTALLED_APPS:
+ if app.endswith('.*'):
+ app_mod = importlib.import_module(app[:-2])
+ appdir = os.path.dirname(app_mod.__file__)
+ app_subdirs = os.listdir(appdir)
+ app_subdirs.sort()
+ name_pattern = re.compile(r'[a-zA-Z]\w*')
+ for d in app_subdirs:
+ if name_pattern.match(d) and os.path.isdir(os.path.join(appdir, d)):
+ new_installed_apps.append('%s.%s' % (app[:-2], d))
+ else:
+ new_installed_apps.append(app)
+ self.INSTALLED_APPS = new_installed_apps
+
+ if hasattr(time, 'tzset') and getattr(self, 'TIME_ZONE'):
+ # Move the time zone info into os.environ. See ticket #2315 for why
+ # we don't do this unconditionally (breaks Windows).
+ os.environ['TZ'] = self.TIME_ZONE
+ time.tzset()
+
+class UserSettingsHolder(object):
+ """
+ Holder for user configured settings.
+ """
+ # SETTINGS_MODULE doesn't make much sense in the manually configured
+ # (standalone) case.
+ SETTINGS_MODULE = None
+
+ def __init__(self, default_settings):
+ """
+ Requests for configuration variables not in this class are satisfied
+ from the module specified in default_settings (if possible).
+ """
+ self.default_settings = default_settings
+
+ def __getattr__(self, name):
+ return getattr(self.default_settings, name)
+
+ def __dir__(self):
+ return self.__dict__.keys() + dir(self.default_settings)
+
+ # For Python < 2.6:
+ __members__ = property(lambda self: self.__dir__())
+
+settings = LazySettings()
+
diff --git a/google/appengine/_internal/django/conf/app_template/__init__.py b/google/appengine/_internal/django/conf/app_template/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/google/appengine/_internal/django/conf/app_template/__init__.py
diff --git a/google/appengine/_internal/django/conf/app_template/models.py b/google/appengine/_internal/django/conf/app_template/models.py
new file mode 100644
index 0000000..e2d2c42
--- /dev/null
+++ b/google/appengine/_internal/django/conf/app_template/models.py
@@ -0,0 +1,3 @@
+from google.appengine._internal.django.db import models
+
+# Create your models here.
diff --git a/google/appengine/_internal/django/conf/app_template/tests.py b/google/appengine/_internal/django/conf/app_template/tests.py
new file mode 100644
index 0000000..34cfea8
--- /dev/null
+++ b/google/appengine/_internal/django/conf/app_template/tests.py
@@ -0,0 +1,23 @@
+"""
+This file demonstrates two different styles of tests (one doctest and one
+unittest). These will both pass when you run "manage.py test".
+
+Replace these with more appropriate tests for your application.
+"""
+
+from google.appengine._internal.django.test import TestCase
+
+class SimpleTest(TestCase):
+ def test_basic_addition(self):
+ """
+ Tests that 1 + 1 always equals 2.
+ """
+ self.failUnlessEqual(1 + 1, 2)
+
+__test__ = {"doctest": """
+Another way to test that 1 + 1 is equal to 2.
+
+>>> 1 + 1 == 2
+True
+"""}
+
diff --git a/google/appengine/_internal/django/conf/app_template/views.py b/google/appengine/_internal/django/conf/app_template/views.py
new file mode 100644
index 0000000..60f00ef
--- /dev/null
+++ b/google/appengine/_internal/django/conf/app_template/views.py
@@ -0,0 +1 @@
+# Create your views here.
diff --git a/google/appengine/_internal/django/conf/global_settings.py b/google/appengine/_internal/django/conf/global_settings.py
new file mode 100644
index 0000000..ec3f1dd
--- /dev/null
+++ b/google/appengine/_internal/django/conf/global_settings.py
@@ -0,0 +1,524 @@
+# Default Django settings. Override these with settings in the module
+# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
+
+# This is defined here as a do-nothing function because we can't import
+# django.utils.translation -- that module depends on the settings.
+gettext_noop = lambda s: s
+
+####################
+# CORE #
+####################
+
+DEBUG = False
+TEMPLATE_DEBUG = False
+
+# Whether the framework should propagate raw exceptions rather than catching
+# them. This is useful under some testing siutations and should never be used
+# on a live site.
+DEBUG_PROPAGATE_EXCEPTIONS = False
+
+# Whether to use the "Etag" header. This saves bandwidth but slows down performance.
+USE_ETAGS = False
+
+# People who get code error notifications.
+# In the format (('Full Name', 'email@domain.com'), ('Full Name', 'anotheremail@domain.com'))
+ADMINS = ()
+
+# Tuple of IP addresses, as strings, that:
+# * See debug comments, when DEBUG is true
+# * Receive x-headers
+INTERNAL_IPS = ()
+
+# Local time zone for this installation. All choices can be found here:
+# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all
+# systems may support all possibilities).
+TIME_ZONE = 'America/Chicago'
+
+# Language code for this installation. All choices can be found here:
+# http://www.i18nguy.com/unicode/language-identifiers.html
+LANGUAGE_CODE = 'en-us'
+
+# Languages we provide translations for, out of the box. The language name
+# should be the utf-8 encoded local name for the language.
+LANGUAGES = (
+ ('ar', gettext_noop('Arabic')),
+ ('bg', gettext_noop('Bulgarian')),
+ ('bn', gettext_noop('Bengali')),
+ ('bs', gettext_noop('Bosnian')),
+ ('ca', gettext_noop('Catalan')),
+ ('cs', gettext_noop('Czech')),
+ ('cy', gettext_noop('Welsh')),
+ ('da', gettext_noop('Danish')),
+ ('de', gettext_noop('German')),
+ ('el', gettext_noop('Greek')),
+ ('en', gettext_noop('English')),
+ ('en-gb', gettext_noop('British English')),
+ ('es', gettext_noop('Spanish')),
+ ('es-ar', gettext_noop('Argentinian Spanish')),
+ ('et', gettext_noop('Estonian')),
+ ('eu', gettext_noop('Basque')),
+ ('fa', gettext_noop('Persian')),
+ ('fi', gettext_noop('Finnish')),
+ ('fr', gettext_noop('French')),
+ ('fy-nl', gettext_noop('Frisian')),
+ ('ga', gettext_noop('Irish')),
+ ('gl', gettext_noop('Galician')),
+ ('he', gettext_noop('Hebrew')),
+ ('hi', gettext_noop('Hindi')),
+ ('hr', gettext_noop('Croatian')),
+ ('hu', gettext_noop('Hungarian')),
+ ('id', gettext_noop('Indonesian')),
+ ('is', gettext_noop('Icelandic')),
+ ('it', gettext_noop('Italian')),
+ ('ja', gettext_noop('Japanese')),
+ ('ka', gettext_noop('Georgian')),
+ ('km', gettext_noop('Khmer')),
+ ('kn', gettext_noop('Kannada')),
+ ('ko', gettext_noop('Korean')),
+ ('lt', gettext_noop('Lithuanian')),
+ ('lv', gettext_noop('Latvian')),
+ ('mk', gettext_noop('Macedonian')),
+ ('ml', gettext_noop('Malayalam')),
+ ('mn', gettext_noop('Mongolian')),
+ ('nl', gettext_noop('Dutch')),
+ ('no', gettext_noop('Norwegian')),
+ ('nb', gettext_noop('Norwegian Bokmal')),
+ ('nn', gettext_noop('Norwegian Nynorsk')),
+ ('pl', gettext_noop('Polish')),
+ ('pt', gettext_noop('Portuguese')),
+ ('pt-br', gettext_noop('Brazilian Portuguese')),
+ ('ro', gettext_noop('Romanian')),
+ ('ru', gettext_noop('Russian')),
+ ('sk', gettext_noop('Slovak')),
+ ('sl', gettext_noop('Slovenian')),
+ ('sq', gettext_noop('Albanian')),
+ ('sr', gettext_noop('Serbian')),
+ ('sr-latn', gettext_noop('Serbian Latin')),
+ ('sv', gettext_noop('Swedish')),
+ ('ta', gettext_noop('Tamil')),
+ ('te', gettext_noop('Telugu')),
+ ('th', gettext_noop('Thai')),
+ ('tr', gettext_noop('Turkish')),
+ ('uk', gettext_noop('Ukrainian')),
+ ('vi', gettext_noop('Vietnamese')),
+ ('zh-cn', gettext_noop('Simplified Chinese')),
+ ('zh-tw', gettext_noop('Traditional Chinese')),
+)
+
+# Languages using BiDi (right-to-left) layout
+LANGUAGES_BIDI = ("he", "ar", "fa")
+
+# If you set this to False, Django will make some optimizations so as not
+# to load the internationalization machinery.
+USE_I18N = True
+LOCALE_PATHS = ()
+LANGUAGE_COOKIE_NAME = 'django_language'
+
+# If you set this to True, Django will format dates, numbers and calendars
+# according to user current locale
+USE_L10N = False
+
+# Not-necessarily-technical managers of the site. They get broken link
+# notifications and other various e-mails.
+MANAGERS = ADMINS
+
+# Default content type and charset to use for all HttpResponse objects, if a
+# MIME type isn't manually specified. These are used to construct the
+# Content-Type header.
+DEFAULT_CONTENT_TYPE = 'text/html'
+DEFAULT_CHARSET = 'utf-8'
+
+# Encoding of files read from disk (template and initial SQL files).
+FILE_CHARSET = 'utf-8'
+
+# E-mail address that error messages come from.
+SERVER_EMAIL = 'root@localhost'
+
+# Whether to send broken-link e-mails.
+SEND_BROKEN_LINK_EMAILS = False
+
+# Database connection info.
+# Legacy format
+DATABASE_ENGINE = '' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
+DATABASE_NAME = '' # Or path to database file if using sqlite3.
+DATABASE_USER = '' # Not used with sqlite3.
+DATABASE_PASSWORD = '' # Not used with sqlite3.
+DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
+DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
+DATABASE_OPTIONS = {} # Set to empty dictionary for default.
+
+# New format
+DATABASES = {
+}
+
+# Classes used to implement db routing behaviour
+DATABASE_ROUTERS = []
+
+# The email backend to use. For possible shortcuts see django.core.mail.
+# The default is to use the SMTP backend.
+# Third-party backends can be specified by providing a Python path
+# to a module that defines an EmailBackend class.
+EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
+
+# Host for sending e-mail.
+EMAIL_HOST = 'localhost'
+
+# Port for sending e-mail.
+EMAIL_PORT = 25
+
+# Optional SMTP authentication information for EMAIL_HOST.
+EMAIL_HOST_USER = ''
+EMAIL_HOST_PASSWORD = ''
+EMAIL_USE_TLS = False
+
+# List of strings representing installed apps.
+INSTALLED_APPS = ()
+
+# List of locations of the template source files, in search order.
+TEMPLATE_DIRS = ()
+
+# List of callables that know how to import templates from various sources.
+# See the comments in django/core/template/loader.py for interface
+# documentation.
+TEMPLATE_LOADERS = (
+ 'django.template.loaders.filesystem.Loader',
+ 'django.template.loaders.app_directories.Loader',
+# 'django.template.loaders.eggs.Loader',
+)
+
+# List of processors used by RequestContext to populate the context.
+# Each one should be a callable that takes the request object as its
+# only parameter and returns a dictionary to add to the context.
+TEMPLATE_CONTEXT_PROCESSORS = (
+ 'django.contrib.auth.context_processors.auth',
+ 'django.core.context_processors.debug',
+ 'django.core.context_processors.i18n',
+ 'django.core.context_processors.media',
+# 'django.core.context_processors.request',
+ 'django.contrib.messages.context_processors.messages',
+)
+
+# Output to use in template system for invalid (e.g. misspelled) variables.
+TEMPLATE_STRING_IF_INVALID = ''
+
+# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
+# trailing slash.
+# Examples: "http://foo.com/media/", "/media/".
+ADMIN_MEDIA_PREFIX = '/media/'
+
+# Default e-mail address to use for various automated correspondence from
+# the site managers.
+DEFAULT_FROM_EMAIL = 'webmaster@localhost'
+
+# Subject-line prefix for email messages send with django.core.mail.mail_admins
+# or ...mail_managers. Make sure to include the trailing space.
+EMAIL_SUBJECT_PREFIX = '[Django] '
+
+# Whether to append trailing slashes to URLs.
+APPEND_SLASH = True
+
+# Whether to prepend the "www." subdomain to URLs that don't have it.
+PREPEND_WWW = False
+
+# Override the server-derived value of SCRIPT_NAME
+FORCE_SCRIPT_NAME = None
+
+# List of compiled regular expression objects representing User-Agent strings
+# that are not allowed to visit any page, systemwide. Use this for bad
+# robots/crawlers. Here are a few examples:
+# import re
+# DISALLOWED_USER_AGENTS = (
+# re.compile(r'^NaverBot.*'),
+# re.compile(r'^EmailSiphon.*'),
+# re.compile(r'^SiteSucker.*'),
+# re.compile(r'^sohu-search')
+# )
+DISALLOWED_USER_AGENTS = ()
+
+ABSOLUTE_URL_OVERRIDES = {}
+
+# Tuple of strings representing allowed prefixes for the {% ssi %} tag.
+# Example: ('/home/html', '/var/www')
+ALLOWED_INCLUDE_ROOTS = ()
+
+# If this is a admin settings module, this should be a list of
+# settings modules (in the format 'foo.bar.baz') for which this admin
+# is an admin.
+ADMIN_FOR = ()
+
+# 404s that may be ignored.
+IGNORABLE_404_STARTS = ('/cgi-bin/', '/_vti_bin', '/_vti_inf')
+IGNORABLE_404_ENDS = ('mail.pl', 'mailform.pl', 'mail.cgi', 'mailform.cgi', 'favicon.ico', '.php')
+
+# A secret key for this particular Django installation. Used in secret-key
+# hashing algorithms. Set this in your settings, or Django will complain
+# loudly.
+SECRET_KEY = ''
+
+# Default file storage mechanism that holds media.
+DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
+
+# Absolute filesystem path to the directory that will hold user-uploaded files.
+# Example: "/home/media/media.lawrence.com/"
+MEDIA_ROOT = ''
+
+# URL that handles the media served from MEDIA_ROOT.
+# Example: "http://media.lawrence.com"
+MEDIA_URL = ''
+
+# List of upload handler classes to be applied in order.
+FILE_UPLOAD_HANDLERS = (
+ 'django.core.files.uploadhandler.MemoryFileUploadHandler',
+ 'django.core.files.uploadhandler.TemporaryFileUploadHandler',
+)
+
+# Maximum size, in bytes, of a request before it will be streamed to the
+# file system instead of into memory.
+FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
+
+# Directory in which upload streamed files will be temporarily saved. A value of
+# `None` will make Django use the operating system's default temporary directory
+# (i.e. "/tmp" on *nix systems).
+FILE_UPLOAD_TEMP_DIR = None
+
+# The numeric mode to set newly-uploaded files to. The value should be a mode
+# you'd pass directly to os.chmod; see http://docs.python.org/lib/os-file-dir.html.
+FILE_UPLOAD_PERMISSIONS = None
+
+# Python module path where user will place custom format definition.
+# The directory where this setting is pointing should contain subdirectories
+# named as the locales, containing a formats.py file
+# (i.e. "myproject.locale" for myproject/locale/en/formats.py etc. use)
+FORMAT_MODULE_PATH = None
+
+# Default formatting for date objects. See all available format strings here:
+# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+DATE_FORMAT = 'N j, Y'
+
+# Default formatting for datetime objects. See all available format strings here:
+# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+DATETIME_FORMAT = 'N j, Y, P'
+
+# Default formatting for time objects. See all available format strings here:
+# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+TIME_FORMAT = 'P'
+
+# Default formatting for date objects when only the year and month are relevant.
+# See all available format strings here:
+# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+YEAR_MONTH_FORMAT = 'F Y'
+
+# Default formatting for date objects when only the month and day are relevant.
+# See all available format strings here:
+# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+MONTH_DAY_FORMAT = 'F j'
+
+# Default short formatting for date objects. See all available format strings here:
+# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+SHORT_DATE_FORMAT = 'm/d/Y'
+
+# Default short formatting for datetime objects.
+# See all available format strings here:
+# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+SHORT_DATETIME_FORMAT = 'm/d/Y P'
+
+# Default formats to be used when parsing dates from input boxes, in order
+# See all available format string here:
+# http://docs.python.org/library/datetime.html#strftime-behavior
+# * Note that these format strings are different from the ones to display dates
+DATE_INPUT_FORMATS = (
+ '%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
+ '%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
+ '%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
+ '%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
+ '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
+)
+
+# Default formats to be used when parsing times from input boxes, in order
+# See all available format string here:
+# http://docs.python.org/library/datetime.html#strftime-behavior
+# * Note that these format strings are different from the ones to display dates
+TIME_INPUT_FORMATS = (
+ '%H:%M:%S', # '14:30:59'
+ '%H:%M', # '14:30'
+)
+
+# Default formats to be used when parsing dates and times from input boxes,
+# in order
+# See all available format string here:
+# http://docs.python.org/library/datetime.html#strftime-behavior
+# * Note that these format strings are different from the ones to display dates
+DATETIME_INPUT_FORMATS = (
+ '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
+ '%Y-%m-%d %H:%M', # '2006-10-25 14:30'
+ '%Y-%m-%d', # '2006-10-25'
+ '%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
+ '%m/%d/%Y %H:%M', # '10/25/2006 14:30'
+ '%m/%d/%Y', # '10/25/2006'
+ '%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
+ '%m/%d/%y %H:%M', # '10/25/06 14:30'
+ '%m/%d/%y', # '10/25/06'
+)
+
+# First day of week, to be used on calendars
+# 0 means Sunday, 1 means Monday...
+FIRST_DAY_OF_WEEK = 0
+
+# Decimal separator symbol
+DECIMAL_SEPARATOR = '.'
+
+# Boolean that sets whether to add thousand separator when formatting numbers
+USE_THOUSAND_SEPARATOR = False
+
+# Number of digits that will be together, when spliting them by
+# THOUSAND_SEPARATOR. 0 means no grouping, 3 means splitting by thousands...
+NUMBER_GROUPING = 0
+
+# Thousand separator symbol
+THOUSAND_SEPARATOR = ','
+
+# Do you want to manage transactions manually?
+# Hint: you really don't!
+TRANSACTIONS_MANAGED = False
+
+# The User-Agent string to use when checking for URL validity through the
+# isExistingURL validator.
+from google.appengine._internal.django import get_version
+URL_VALIDATOR_USER_AGENT = "Django/%s (http://www.djangoproject.com)" % get_version()
+
+# The tablespaces to use for each model when not specified otherwise.
+DEFAULT_TABLESPACE = ''
+DEFAULT_INDEX_TABLESPACE = ''
+
+##############
+# MIDDLEWARE #
+##############
+
+# List of middleware classes to use. Order is important; in the request phase,
+# this middleware classes will be applied in the order given, and in the
+# response phase the middleware will be applied in reverse order.
+MIDDLEWARE_CLASSES = (
+ 'django.middleware.common.CommonMiddleware',
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.middleware.csrf.CsrfViewMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ 'django.contrib.messages.middleware.MessageMiddleware',
+# 'django.middleware.http.ConditionalGetMiddleware',
+# 'django.middleware.gzip.GZipMiddleware',
+)
+
+############
+# SESSIONS #
+############
+
+SESSION_COOKIE_NAME = 'sessionid' # Cookie name. This can be whatever you want.
+SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2 # Age of cookie, in seconds (default: 2 weeks).
+SESSION_COOKIE_DOMAIN = None # A string like ".lawrence.com", or None for standard domain cookie.
+SESSION_COOKIE_SECURE = False # Whether the session cookie should be secure (https:// only).
+SESSION_COOKIE_PATH = '/' # The path of the session cookie.
+SESSION_SAVE_EVERY_REQUEST = False # Whether to save the session data on every request.
+SESSION_EXPIRE_AT_BROWSER_CLOSE = False # Whether a user's session cookie expires when the Web browser is closed.
+SESSION_ENGINE = 'django.contrib.sessions.backends.db' # The module to store session data
+SESSION_FILE_PATH = None # Directory to store session files if using the file session module. If None, the backend will use a sensible default.
+
+#########
+# CACHE #
+#########
+
+# The cache backend to use. See the docstring in django.core.cache for the
+# possible values.
+CACHE_BACKEND = 'locmem://'
+CACHE_MIDDLEWARE_KEY_PREFIX = ''
+CACHE_MIDDLEWARE_SECONDS = 600
+
+####################
+# COMMENTS #
+####################
+
+COMMENTS_ALLOW_PROFANITIES = False
+
+# The profanities that will trigger a validation error in the
+# 'hasNoProfanities' validator. All of these should be in lowercase.
+PROFANITIES_LIST = ('asshat', 'asshead', 'asshole', 'cunt', 'fuck', 'gook', 'nigger', 'shit')
+
+# The group ID that designates which users are banned.
+# Set to None if you're not using it.
+COMMENTS_BANNED_USERS_GROUP = None
+
+# The group ID that designates which users can moderate comments.
+# Set to None if you're not using it.
+COMMENTS_MODERATORS_GROUP = None
+
+# The group ID that designates the users whose comments should be e-mailed to MANAGERS.
+# Set to None if you're not using it.
+COMMENTS_SKETCHY_USERS_GROUP = None
+
+# The system will e-mail MANAGERS the first COMMENTS_FIRST_FEW comments by each
+# user. Set this to 0 if you want to disable it.
+COMMENTS_FIRST_FEW = 0
+
+# A tuple of IP addresses that have been banned from participating in various
+# Django-powered features.
+BANNED_IPS = ()
+
+##################
+# AUTHENTICATION #
+##################
+
+AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend',)
+
+LOGIN_URL = '/accounts/login/'
+
+LOGOUT_URL = '/accounts/logout/'
+
+LOGIN_REDIRECT_URL = '/accounts/profile/'
+
+# The number of days a password reset link is valid for
+PASSWORD_RESET_TIMEOUT_DAYS = 3
+
+########
+# CSRF #
+########
+
+# Dotted path to callable to be used as view when a request is
+# rejected by the CSRF middleware.
+CSRF_FAILURE_VIEW = 'django.views.csrf.csrf_failure'
+
+# Name and domain for CSRF cookie.
+CSRF_COOKIE_NAME = 'csrftoken'
+CSRF_COOKIE_DOMAIN = None
+
+############
+# MESSAGES #
+############
+
+# Class to use as messges backend
+MESSAGE_STORAGE = 'django.contrib.messages.storage.user_messages.LegacyFallbackStorage'
+
+# Default values of MESSAGE_LEVEL and MESSAGE_TAGS are defined within
+# django.contrib.messages to avoid imports in this settings file.
+
+###########
+# TESTING #
+###########
+
+# The name of the class to use to run the test suite
+TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
+
+# The name of the database to use for testing purposes.
+# If None, a name of 'test_' + DATABASE_NAME will be assumed
+TEST_DATABASE_NAME = None
+
+# Strings used to set the character set and collation order for the test
+# database. These values are passed literally to the server, so they are
+# backend-dependent. If None, no special settings are sent (system defaults are
+# used).
+TEST_DATABASE_CHARSET = None
+TEST_DATABASE_COLLATION = None
+
+############
+# FIXTURES #
+############
+
+# The list of directories to search for fixtures
+FIXTURE_DIRS = ()
diff --git a/google/appengine/_internal/django/conf/locale/en/LC_MESSAGES/django.mo b/google/appengine/_internal/django/conf/locale/en/LC_MESSAGES/django.mo
new file mode 100644
index 0000000..c498c9a
--- /dev/null
+++ b/google/appengine/_internal/django/conf/locale/en/LC_MESSAGES/django.mo
Binary files differ
diff --git a/google/appengine/_internal/django/conf/locale/en/LC_MESSAGES/django.po b/google/appengine/_internal/django/conf/locale/en/LC_MESSAGES/django.po
new file mode 100644
index 0000000..dc13bb5
--- /dev/null
+++ b/google/appengine/_internal/django/conf/locale/en/LC_MESSAGES/django.po
@@ -0,0 +1,5002 @@
+# This file is distributed under the same license as the Django package.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: Django\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2010-05-13 15:35+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#: conf/global_settings.py:44
+msgid "Arabic"
+msgstr ""
+
+#: conf/global_settings.py:45
+msgid "Bulgarian"
+msgstr ""
+
+#: conf/global_settings.py:46
+msgid "Bengali"
+msgstr ""
+
+#: conf/global_settings.py:47
+msgid "Bosnian"
+msgstr ""
+
+#: conf/global_settings.py:48
+msgid "Catalan"
+msgstr ""
+
+#: conf/global_settings.py:49
+msgid "Czech"
+msgstr ""
+
+#: conf/global_settings.py:50
+msgid "Welsh"
+msgstr ""
+
+#: conf/global_settings.py:51
+msgid "Danish"
+msgstr ""
+
+#: conf/global_settings.py:52
+msgid "German"
+msgstr ""
+
+#: conf/global_settings.py:53
+msgid "Greek"
+msgstr ""
+
+#: conf/global_settings.py:54
+msgid "English"
+msgstr ""
+
+#: conf/global_settings.py:55
+msgid "British English"
+msgstr ""
+
+#: conf/global_settings.py:56
+msgid "Spanish"
+msgstr ""
+
+#: conf/global_settings.py:57
+msgid "Argentinean Spanish"
+msgstr ""
+
+#: conf/global_settings.py:58
+msgid "Estonian"
+msgstr ""
+
+#: conf/global_settings.py:59
+msgid "Basque"
+msgstr ""
+
+#: conf/global_settings.py:60
+msgid "Persian"
+msgstr ""
+
+#: conf/global_settings.py:61
+msgid "Finnish"
+msgstr ""
+
+#: conf/global_settings.py:62
+msgid "French"
+msgstr ""
+
+#: conf/global_settings.py:63
+msgid "Frisian"
+msgstr ""
+
+#: conf/global_settings.py:64
+msgid "Irish"
+msgstr ""
+
+#: conf/global_settings.py:65
+msgid "Galician"
+msgstr ""
+
+#: conf/global_settings.py:66
+msgid "Hebrew"
+msgstr ""
+
+#: conf/global_settings.py:67
+msgid "Hindi"
+msgstr ""
+
+#: conf/global_settings.py:68
+msgid "Croatian"
+msgstr ""
+
+#: conf/global_settings.py:69
+msgid "Hungarian"
+msgstr ""
+
+#: conf/global_settings.py:70
+msgid "Indonesian"
+msgstr ""
+
+#: conf/global_settings.py:71
+msgid "Icelandic"
+msgstr ""
+
+#: conf/global_settings.py:72
+msgid "Italian"
+msgstr ""
+
+#: conf/global_settings.py:73
+msgid "Japanese"
+msgstr ""
+
+#: conf/global_settings.py:74
+msgid "Georgian"
+msgstr ""
+
+#: conf/global_settings.py:75
+msgid "Khmer"
+msgstr ""
+
+#: conf/global_settings.py:76
+msgid "Kannada"
+msgstr ""
+
+#: conf/global_settings.py:77
+msgid "Korean"
+msgstr ""
+
+#: conf/global_settings.py:78
+msgid "Lithuanian"
+msgstr ""
+
+#: conf/global_settings.py:79
+msgid "Latvian"
+msgstr ""
+
+#: conf/global_settings.py:80
+msgid "Macedonian"
+msgstr ""
+
+#: conf/global_settings.py:81
+msgid "Mongolian"
+msgstr ""
+
+#: conf/global_settings.py:82
+msgid "Dutch"
+msgstr ""
+
+#: conf/global_settings.py:83
+msgid "Norwegian"
+msgstr ""
+
+#: conf/global_settings.py:84
+msgid "Norwegian Bokmal"
+msgstr ""
+
+#: conf/global_settings.py:85
+msgid "Norwegian Nynorsk"
+msgstr ""
+
+#: conf/global_settings.py:86
+msgid "Polish"
+msgstr ""
+
+#: conf/global_settings.py:87
+msgid "Portuguese"
+msgstr ""
+
+#: conf/global_settings.py:88
+msgid "Brazilian Portuguese"
+msgstr ""
+
+#: conf/global_settings.py:89
+msgid "Romanian"
+msgstr ""
+
+#: conf/global_settings.py:90
+msgid "Russian"
+msgstr ""
+
+#: conf/global_settings.py:91
+msgid "Slovak"
+msgstr ""
+
+#: conf/global_settings.py:92
+msgid "Slovenian"
+msgstr ""
+
+#: conf/global_settings.py:93
+msgid "Albanian"
+msgstr ""
+
+#: conf/global_settings.py:94
+msgid "Serbian"
+msgstr ""
+
+#: conf/global_settings.py:95
+msgid "Serbian Latin"
+msgstr ""
+
+#: conf/global_settings.py:96
+msgid "Swedish"
+msgstr ""
+
+#: conf/global_settings.py:97
+msgid "Tamil"
+msgstr ""
+
+#: conf/global_settings.py:98
+msgid "Telugu"
+msgstr ""
+
+#: conf/global_settings.py:99
+msgid "Thai"
+msgstr ""
+
+#: conf/global_settings.py:100
+msgid "Turkish"
+msgstr ""
+
+#: conf/global_settings.py:101
+msgid "Ukrainian"
+msgstr ""
+
+#: conf/global_settings.py:102
+msgid "Vietnamese"
+msgstr ""
+
+#: conf/global_settings.py:103
+msgid "Simplified Chinese"
+msgstr ""
+
+#: conf/global_settings.py:104
+msgid "Traditional Chinese"
+msgstr ""
+
+#: contrib/admin/actions.py:48
+#, python-format
+msgid "Successfully deleted %(count)d %(items)s."
+msgstr ""
+
+#: contrib/admin/actions.py:55 contrib/admin/options.py:1125
+msgid "Are you sure?"
+msgstr ""
+
+#: contrib/admin/actions.py:73
+#, python-format
+msgid "Delete selected %(verbose_name_plural)s"
+msgstr ""
+
+#: contrib/admin/filterspecs.py:44
+#, python-format
+msgid ""
+"<h3>By %s:</h3>\n"
+"<ul>\n"
+msgstr ""
+
+#: contrib/admin/filterspecs.py:75 contrib/admin/filterspecs.py:92
+#: contrib/admin/filterspecs.py:147 contrib/admin/filterspecs.py:173
+msgid "All"
+msgstr ""
+
+#: contrib/admin/filterspecs.py:113
+msgid "Any date"
+msgstr ""
+
+#: contrib/admin/filterspecs.py:114
+msgid "Today"
+msgstr ""
+
+#: contrib/admin/filterspecs.py:117
+msgid "Past 7 days"
+msgstr ""
+
+#: contrib/admin/filterspecs.py:119
+msgid "This month"
+msgstr ""
+
+#: contrib/admin/filterspecs.py:121
+msgid "This year"
+msgstr ""
+
+#: contrib/admin/filterspecs.py:147 forms/widgets.py:469
+msgid "Yes"
+msgstr ""
+
+#: contrib/admin/filterspecs.py:147 forms/widgets.py:469
+msgid "No"
+msgstr ""
+
+#: contrib/admin/filterspecs.py:154 forms/widgets.py:469
+msgid "Unknown"
+msgstr ""
+
+#: contrib/admin/helpers.py:20
+msgid "Action:"
+msgstr ""
+
+#: contrib/admin/models.py:19
+msgid "action time"
+msgstr ""
+
+#: contrib/admin/models.py:22
+msgid "object id"
+msgstr ""
+
+#: contrib/admin/models.py:23
+msgid "object repr"
+msgstr ""
+
+#: contrib/admin/models.py:24
+msgid "action flag"
+msgstr ""
+
+#: contrib/admin/models.py:25
+msgid "change message"
+msgstr ""
+
+#: contrib/admin/models.py:28
+msgid "log entry"
+msgstr ""
+
+#: contrib/admin/models.py:29
+msgid "log entries"
+msgstr ""
+
+#: contrib/admin/options.py:138 contrib/admin/options.py:153
+msgid "None"
+msgstr ""
+
+#: contrib/admin/options.py:559
+#, python-format
+msgid "Changed %s."
+msgstr ""
+
+#: contrib/admin/options.py:559 contrib/admin/options.py:569
+#: contrib/comments/templates/comments/preview.html:16 db/models/base.py:845
+#: forms/models.py:568
+msgid "and"
+msgstr ""
+
+#: contrib/admin/options.py:564
+#, python-format
+msgid "Added %(name)s \"%(object)s\"."
+msgstr ""
+
+#: contrib/admin/options.py:568
+#, python-format
+msgid "Changed %(list)s for %(name)s \"%(object)s\"."
+msgstr ""
+
+#: contrib/admin/options.py:573
+#, python-format
+msgid "Deleted %(name)s \"%(object)s\"."
+msgstr ""
+
+#: contrib/admin/options.py:577
+msgid "No fields changed."
+msgstr ""
+
+#: contrib/admin/options.py:643
+#, python-format
+msgid "The %(name)s \"%(obj)s\" was added successfully."
+msgstr ""
+
+#: contrib/admin/options.py:647 contrib/admin/options.py:680
+msgid "You may edit it again below."
+msgstr ""
+
+#: contrib/admin/options.py:657 contrib/admin/options.py:690
+#, python-format
+msgid "You may add another %s below."
+msgstr ""
+
+#: contrib/admin/options.py:678
+#, python-format
+msgid "The %(name)s \"%(obj)s\" was changed successfully."
+msgstr ""
+
+#: contrib/admin/options.py:686
+#, python-format
+msgid ""
+"The %(name)s \"%(obj)s\" was added successfully. You may edit it again below."
+msgstr ""
+
+#: contrib/admin/options.py:740 contrib/admin/options.py:997
+msgid ""
+"Items must be selected in order to perform actions on them. No items have "
+"been changed."
+msgstr ""
+
+#: contrib/admin/options.py:759
+msgid "No action selected."
+msgstr ""
+
+#: contrib/admin/options.py:840
+#, python-format
+msgid "Add %s"
+msgstr ""
+
+#: contrib/admin/options.py:866 contrib/admin/options.py:1105
+#, python-format
+msgid "%(name)s object with primary key %(key)r does not exist."
+msgstr ""
+
+#: contrib/admin/options.py:931
+#, python-format
+msgid "Change %s"
+msgstr ""
+
+#: contrib/admin/options.py:977
+msgid "Database error"
+msgstr ""
+
+#: contrib/admin/options.py:1039
+#, python-format
+msgid "%(count)s %(name)s was changed successfully."
+msgid_plural "%(count)s %(name)s were changed successfully."
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/admin/options.py:1066
+#, python-format
+msgid "%(total_count)s selected"
+msgid_plural "All %(total_count)s selected"
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/admin/options.py:1071
+#, python-format
+msgid "0 of %(cnt)s selected"
+msgstr ""
+
+#: contrib/admin/options.py:1118
+#, python-format
+msgid "The %(name)s \"%(obj)s\" was deleted successfully."
+msgstr ""
+
+#: contrib/admin/options.py:1155
+#, python-format
+msgid "Change history: %s"
+msgstr ""
+
+#: contrib/admin/sites.py:18 contrib/admin/views/decorators.py:14
+#: contrib/auth/forms.py:81
+msgid ""
+"Please enter a correct username and password. Note that both fields are case-"
+"sensitive."
+msgstr ""
+
+#: contrib/admin/sites.py:307 contrib/admin/views/decorators.py:40
+msgid "Please log in again, because your session has expired."
+msgstr ""
+
+#: contrib/admin/sites.py:314 contrib/admin/views/decorators.py:47
+msgid ""
+"Looks like your browser isn't configured to accept cookies. Please enable "
+"cookies, reload this page, and try again."
+msgstr ""
+
+#: contrib/admin/sites.py:330 contrib/admin/sites.py:336
+#: contrib/admin/views/decorators.py:66
+msgid "Usernames cannot contain the '@' character."
+msgstr ""
+
+#: contrib/admin/sites.py:333 contrib/admin/views/decorators.py:62
+#, python-format
+msgid "Your e-mail address is not your username. Try '%s' instead."
+msgstr ""
+
+#: contrib/admin/sites.py:389
+msgid "Site administration"
+msgstr ""
+
+#: contrib/admin/sites.py:403 contrib/admin/templates/admin/login.html:26
+#: contrib/admin/templates/registration/password_reset_complete.html:14
+#: contrib/admin/views/decorators.py:20
+msgid "Log in"
+msgstr ""
+
+#: contrib/admin/sites.py:448
+#, python-format
+msgid "%s administration"
+msgstr ""
+
+#: contrib/admin/widgets.py:75
+msgid "Date:"
+msgstr ""
+
+#: contrib/admin/widgets.py:75
+msgid "Time:"
+msgstr ""
+
+#: contrib/admin/widgets.py:99
+msgid "Currently:"
+msgstr ""
+
+#: contrib/admin/widgets.py:99
+msgid "Change:"
+msgstr ""
+
+#: contrib/admin/widgets.py:129
+msgid "Lookup"
+msgstr ""
+
+#: contrib/admin/widgets.py:244
+msgid "Add Another"
+msgstr ""
+
+#: contrib/admin/templates/admin/404.html:4
+#: contrib/admin/templates/admin/404.html:8
+msgid "Page not found"
+msgstr ""
+
+#: contrib/admin/templates/admin/404.html:10
+msgid "We're sorry, but the requested page could not be found."
+msgstr ""
+
+#: contrib/admin/templates/admin/500.html:4
+#: contrib/admin/templates/admin/app_index.html:8
+#: contrib/admin/templates/admin/base.html:55
+#: contrib/admin/templates/admin/change_form.html:18
+#: contrib/admin/templates/admin/change_list.html:42
+#: contrib/admin/templates/admin/delete_confirmation.html:6
+#: contrib/admin/templates/admin/delete_selected_confirmation.html:6
+#: contrib/admin/templates/admin/invalid_setup.html:4
+#: contrib/admin/templates/admin/object_history.html:6
+#: contrib/admin/templates/admin/auth/user/change_password.html:11
+#: contrib/admin/templates/registration/logged_out.html:4
+#: contrib/admin/templates/registration/password_change_done.html:4
+#: contrib/admin/templates/registration/password_change_form.html:5
+#: contrib/admin/templates/registration/password_reset_complete.html:4
+#: contrib/admin/templates/registration/password_reset_confirm.html:4
+#: contrib/admin/templates/registration/password_reset_done.html:4
+#: contrib/admin/templates/registration/password_reset_form.html:4
+#: contrib/admindocs/templates/admin_doc/bookmarklets.html:3
+msgid "Home"
+msgstr ""
+
+#: contrib/admin/templates/admin/500.html:4
+msgid "Server error"
+msgstr ""
+
+#: contrib/admin/templates/admin/500.html:6
+msgid "Server error (500)"
+msgstr ""
+
+#: contrib/admin/templates/admin/500.html:9
+msgid "Server Error <em>(500)</em>"
+msgstr ""
+
+#: contrib/admin/templates/admin/500.html:10
+msgid ""
+"There's been an error. It's been reported to the site administrators via e-"
+"mail and should be fixed shortly. Thanks for your patience."
+msgstr ""
+
+#: contrib/admin/templates/admin/actions.html:4
+msgid "Run the selected action"
+msgstr ""
+
+#: contrib/admin/templates/admin/actions.html:4
+msgid "Go"
+msgstr ""
+
+#: contrib/admin/templates/admin/actions.html:11
+msgid "Click here to select the objects across all pages"
+msgstr ""
+
+#: contrib/admin/templates/admin/actions.html:11
+#, python-format
+msgid "Select all %(total_count)s %(module_name)s"
+msgstr ""
+
+#: contrib/admin/templates/admin/actions.html:13
+msgid "Clear selection"
+msgstr ""
+
+#: contrib/admin/templates/admin/app_index.html:10
+#: contrib/admin/templates/admin/index.html:19
+#, python-format
+msgid "%(name)s"
+msgstr ""
+
+#: contrib/admin/templates/admin/base.html:28
+msgid "Welcome,"
+msgstr ""
+
+#: contrib/admin/templates/admin/base.html:33
+#: contrib/admin/templates/registration/password_change_done.html:3
+#: contrib/admin/templates/registration/password_change_form.html:4
+#: contrib/admindocs/templates/admin_doc/bookmarklets.html:3
+msgid "Documentation"
+msgstr ""
+
+#: contrib/admin/templates/admin/base.html:41
+#: contrib/admin/templates/admin/auth/user/change_password.html:15
+#: contrib/admin/templates/admin/auth/user/change_password.html:48
+#: contrib/admin/templates/registration/password_change_done.html:3
+#: contrib/admin/templates/registration/password_change_form.html:4
+msgid "Change password"
+msgstr ""
+
+#: contrib/admin/templates/admin/base.html:48
+#: contrib/admin/templates/registration/password_change_done.html:3
+#: contrib/admin/templates/registration/password_change_form.html:4
+msgid "Log out"
+msgstr ""
+
+#: contrib/admin/templates/admin/base_site.html:4
+msgid "Django site admin"
+msgstr ""
+
+#: contrib/admin/templates/admin/base_site.html:7
+msgid "Django administration"
+msgstr ""
+
+#: contrib/admin/templates/admin/change_form.html:21
+#: contrib/admin/templates/admin/index.html:29
+msgid "Add"
+msgstr ""
+
+#: contrib/admin/templates/admin/change_form.html:28
+#: contrib/admin/templates/admin/object_history.html:10
+msgid "History"
+msgstr ""
+
+#: contrib/admin/templates/admin/change_form.html:29
+#: contrib/admin/templates/admin/edit_inline/stacked.html:9
+#: contrib/admin/templates/admin/edit_inline/tabular.html:28
+msgid "View on site"
+msgstr ""
+
+#: contrib/admin/templates/admin/change_form.html:39
+#: contrib/admin/templates/admin/change_list.html:71
+#: contrib/admin/templates/admin/auth/user/change_password.html:24
+#: contrib/admin/templates/registration/password_change_form.html:15
+msgid "Please correct the error below."
+msgid_plural "Please correct the errors below."
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/admin/templates/admin/change_list.html:63
+#, python-format
+msgid "Add %(name)s"
+msgstr ""
+
+#: contrib/admin/templates/admin/change_list.html:82
+msgid "Filter"
+msgstr ""
+
+#: contrib/admin/templates/admin/delete_confirmation.html:10
+#: contrib/admin/templates/admin/submit_line.html:4 forms/formsets.py:302
+msgid "Delete"
+msgstr ""
+
+#: contrib/admin/templates/admin/delete_confirmation.html:16
+#, python-format
+msgid ""
+"Deleting the %(object_name)s '%(escaped_object)s' would result in deleting "
+"related objects, but your account doesn't have permission to delete the "
+"following types of objects:"
+msgstr ""
+
+#: contrib/admin/templates/admin/delete_confirmation.html:23
+#, python-format
+msgid ""
+"Are you sure you want to delete the %(object_name)s \"%(escaped_object)s\"? "
+"All of the following related items will be deleted:"
+msgstr ""
+
+#: contrib/admin/templates/admin/delete_confirmation.html:28
+#: contrib/admin/templates/admin/delete_selected_confirmation.html:33
+msgid "Yes, I'm sure"
+msgstr ""
+
+#: contrib/admin/templates/admin/delete_selected_confirmation.html:9
+msgid "Delete multiple objects"
+msgstr ""
+
+#: contrib/admin/templates/admin/delete_selected_confirmation.html:15
+#, python-format
+msgid ""
+"Deleting the %(object_name)s would result in deleting related objects, but "
+"your account doesn't have permission to delete the following types of "
+"objects:"
+msgstr ""
+
+#: contrib/admin/templates/admin/delete_selected_confirmation.html:22
+#, python-format
+msgid ""
+"Are you sure you want to delete the selected %(object_name)s objects? All of "
+"the following objects and their related items will be deleted:"
+msgstr ""
+
+#: contrib/admin/templates/admin/filter.html:2
+#, python-format
+msgid " By %(filter_title)s "
+msgstr ""
+
+#: contrib/admin/templates/admin/index.html:18
+#, python-format
+msgid "Models available in the %(name)s application."
+msgstr ""
+
+#: contrib/admin/templates/admin/index.html:35
+msgid "Change"
+msgstr ""
+
+#: contrib/admin/templates/admin/index.html:45
+msgid "You don't have permission to edit anything."
+msgstr ""
+
+#: contrib/admin/templates/admin/index.html:53
+msgid "Recent Actions"
+msgstr ""
+
+#: contrib/admin/templates/admin/index.html:54
+msgid "My Actions"
+msgstr ""
+
+#: contrib/admin/templates/admin/index.html:58
+msgid "None available"
+msgstr ""
+
+#: contrib/admin/templates/admin/index.html:72
+msgid "Unknown content"
+msgstr ""
+
+#: contrib/admin/templates/admin/invalid_setup.html:7
+msgid ""
+"Something's wrong with your database installation. Make sure the appropriate "
+"database tables have been created, and make sure the database is readable by "
+"the appropriate user."
+msgstr ""
+
+#: contrib/admin/templates/admin/login.html:19
+msgid "Username:"
+msgstr ""
+
+#: contrib/admin/templates/admin/login.html:22
+msgid "Password:"
+msgstr ""
+
+#: contrib/admin/templates/admin/object_history.html:22
+msgid "Date/time"
+msgstr ""
+
+#: contrib/admin/templates/admin/object_history.html:23
+msgid "User"
+msgstr ""
+
+#: contrib/admin/templates/admin/object_history.html:24
+msgid "Action"
+msgstr ""
+
+#: contrib/admin/templates/admin/object_history.html:38
+msgid ""
+"This object doesn't have a change history. It probably wasn't added via this "
+"admin site."
+msgstr ""
+
+#: contrib/admin/templates/admin/pagination.html:10
+msgid "Show all"
+msgstr ""
+
+#: contrib/admin/templates/admin/pagination.html:11
+#: contrib/admin/templates/admin/submit_line.html:3
+msgid "Save"
+msgstr ""
+
+#: contrib/admin/templates/admin/search_form.html:8
+msgid "Search"
+msgstr ""
+
+#: contrib/admin/templates/admin/search_form.html:10
+#, python-format
+msgid "1 result"
+msgid_plural "%(counter)s results"
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/admin/templates/admin/search_form.html:10
+#, python-format
+msgid "%(full_result_count)s total"
+msgstr ""
+
+#: contrib/admin/templates/admin/submit_line.html:5
+msgid "Save as new"
+msgstr ""
+
+#: contrib/admin/templates/admin/submit_line.html:6
+msgid "Save and add another"
+msgstr ""
+
+#: contrib/admin/templates/admin/submit_line.html:7
+msgid "Save and continue editing"
+msgstr ""
+
+#: contrib/admin/templates/admin/auth/user/add_form.html:5
+msgid ""
+"First, enter a username and password. Then, you'll be able to edit more user "
+"options."
+msgstr ""
+
+#: contrib/admin/templates/admin/auth/user/change_password.html:28
+#, python-format
+msgid "Enter a new password for the user <strong>%(username)s</strong>."
+msgstr ""
+
+#: contrib/admin/templates/admin/auth/user/change_password.html:35
+#: contrib/auth/forms.py:17 contrib/auth/forms.py:61 contrib/auth/forms.py:186
+msgid "Password"
+msgstr ""
+
+#: contrib/admin/templates/admin/auth/user/change_password.html:41
+#: contrib/admin/templates/registration/password_change_form.html:37
+#: contrib/auth/forms.py:187
+msgid "Password (again)"
+msgstr ""
+
+#: contrib/admin/templates/admin/auth/user/change_password.html:42
+#: contrib/auth/forms.py:19
+msgid "Enter the same password as above, for verification."
+msgstr ""
+
+#: contrib/admin/templates/admin/edit_inline/stacked.html:64
+#: contrib/admin/templates/admin/edit_inline/tabular.html:110
+#, python-format
+msgid "Add another %(verbose_name)s"
+msgstr ""
+
+#: contrib/admin/templates/admin/edit_inline/stacked.html:67
+#: contrib/admin/templates/admin/edit_inline/tabular.html:113
+#: contrib/comments/templates/comments/delete.html:12
+msgid "Remove"
+msgstr ""
+
+#: contrib/admin/templates/admin/edit_inline/tabular.html:15
+msgid "Delete?"
+msgstr ""
+
+#: contrib/admin/templates/registration/logged_out.html:8
+msgid "Thanks for spending some quality time with the Web site today."
+msgstr ""
+
+#: contrib/admin/templates/registration/logged_out.html:10
+msgid "Log in again"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_change_done.html:4
+#: contrib/admin/templates/registration/password_change_form.html:5
+#: contrib/admin/templates/registration/password_change_form.html:7
+#: contrib/admin/templates/registration/password_change_form.html:19
+msgid "Password change"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_change_done.html:6
+#: contrib/admin/templates/registration/password_change_done.html:10
+msgid "Password change successful"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_change_done.html:12
+msgid "Your password was changed."
+msgstr ""
+
+#: contrib/admin/templates/registration/password_change_form.html:21
+msgid ""
+"Please enter your old password, for security's sake, and then enter your new "
+"password twice so we can verify you typed it in correctly."
+msgstr ""
+
+#: contrib/admin/templates/registration/password_change_form.html:27
+#: contrib/auth/forms.py:170
+msgid "Old password"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_change_form.html:32
+#: contrib/auth/forms.py:144
+msgid "New password"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_change_form.html:43
+#: contrib/admin/templates/registration/password_reset_confirm.html:21
+msgid "Change my password"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_complete.html:4
+#: contrib/admin/templates/registration/password_reset_confirm.html:6
+#: contrib/admin/templates/registration/password_reset_done.html:4
+#: contrib/admin/templates/registration/password_reset_form.html:4
+#: contrib/admin/templates/registration/password_reset_form.html:6
+#: contrib/admin/templates/registration/password_reset_form.html:10
+msgid "Password reset"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_complete.html:6
+#: contrib/admin/templates/registration/password_reset_complete.html:10
+msgid "Password reset complete"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_complete.html:12
+msgid "Your password has been set. You may go ahead and log in now."
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_confirm.html:4
+msgid "Password reset confirmation"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_confirm.html:12
+msgid "Enter new password"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_confirm.html:14
+msgid ""
+"Please enter your new password twice so we can verify you typed it in "
+"correctly."
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_confirm.html:18
+msgid "New password:"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_confirm.html:20
+msgid "Confirm password:"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_confirm.html:26
+msgid "Password reset unsuccessful"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_confirm.html:28
+msgid ""
+"The password reset link was invalid, possibly because it has already been "
+"used. Please request a new password reset."
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_done.html:6
+#: contrib/admin/templates/registration/password_reset_done.html:10
+msgid "Password reset successful"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_done.html:12
+msgid ""
+"We've e-mailed you instructions for setting your password to the e-mail "
+"address you submitted. You should be receiving it shortly."
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_email.html:2
+msgid "You're receiving this e-mail because you requested a password reset"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_email.html:3
+#, python-format
+msgid "for your user account at %(site_name)s"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_email.html:5
+msgid "Please go to the following page and choose a new password:"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_email.html:9
+msgid "Your username, in case you've forgotten:"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_email.html:11
+msgid "Thanks for using our site!"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_email.html:13
+#, python-format
+msgid "The %(site_name)s team"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_form.html:12
+msgid ""
+"Forgotten your password? Enter your e-mail address below, and we'll e-mail "
+"instructions for setting a new one."
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_form.html:16
+msgid "E-mail address:"
+msgstr ""
+
+#: contrib/admin/templates/registration/password_reset_form.html:16
+msgid "Reset my password"
+msgstr ""
+
+#: contrib/admin/templatetags/admin_list.py:257
+msgid "All dates"
+msgstr ""
+
+#: contrib/admin/views/main.py:65
+#, python-format
+msgid "Select %s"
+msgstr ""
+
+#: contrib/admin/views/main.py:65
+#, python-format
+msgid "Select %s to change"
+msgstr ""
+
+#: contrib/admin/views/template.py:38 contrib/sites/models.py:38
+msgid "site"
+msgstr ""
+
+#: contrib/admin/views/template.py:40
+msgid "template"
+msgstr ""
+
+#: contrib/admindocs/views.py:61 contrib/admindocs/views.py:63
+#: contrib/admindocs/views.py:65
+msgid "tag:"
+msgstr ""
+
+#: contrib/admindocs/views.py:94 contrib/admindocs/views.py:96
+#: contrib/admindocs/views.py:98
+msgid "filter:"
+msgstr ""
+
+#: contrib/admindocs/views.py:158 contrib/admindocs/views.py:160
+#: contrib/admindocs/views.py:162
+msgid "view:"
+msgstr ""
+
+#: contrib/admindocs/views.py:190
+#, python-format
+msgid "App %r not found"
+msgstr ""
+
+#: contrib/admindocs/views.py:197
+#, python-format
+msgid "Model %(model_name)r not found in app %(app_label)r"
+msgstr ""
+
+#: contrib/admindocs/views.py:209
+#, python-format
+msgid "the related `%(app_label)s.%(data_type)s` object"
+msgstr ""
+
+#: contrib/admindocs/views.py:209 contrib/admindocs/views.py:228
+#: contrib/admindocs/views.py:233 contrib/admindocs/views.py:247
+#: contrib/admindocs/views.py:261 contrib/admindocs/views.py:266
+msgid "model:"
+msgstr ""
+
+#: contrib/admindocs/views.py:224 contrib/admindocs/views.py:256
+#, python-format
+msgid "related `%(app_label)s.%(object_name)s` objects"
+msgstr ""
+
+#: contrib/admindocs/views.py:228 contrib/admindocs/views.py:261
+#, python-format
+msgid "all %s"
+msgstr ""
+
+#: contrib/admindocs/views.py:233 contrib/admindocs/views.py:266
+#, python-format
+msgid "number of %s"
+msgstr ""
+
+#: contrib/admindocs/views.py:271
+#, python-format
+msgid "Fields on %s objects"
+msgstr ""
+
+#: contrib/admindocs/views.py:361
+#, python-format
+msgid "%s does not appear to be a urlpattern object"
+msgstr ""
+
+#: contrib/admindocs/templates/admin_doc/bookmarklets.html:3
+msgid "Bookmarklets"
+msgstr ""
+
+#: contrib/admindocs/templates/admin_doc/bookmarklets.html:4
+msgid "Documentation bookmarklets"
+msgstr ""
+
+#: contrib/admindocs/templates/admin_doc/bookmarklets.html:8
+msgid ""
+"\n"
+"<p class=\"help\">To install bookmarklets, drag the link to your bookmarks\n"
+"toolbar, or right-click the link and add it to your bookmarks. Now you can\n"
+"select the bookmarklet from any page in the site. Note that some of these\n"
+"bookmarklets require you to be viewing the site from a computer designated\n"
+"as \"internal\" (talk to your system administrator if you aren't sure if\n"
+"your computer is \"internal\").</p>\n"
+msgstr ""
+
+#: contrib/admindocs/templates/admin_doc/bookmarklets.html:18
+msgid "Documentation for this page"
+msgstr ""
+
+#: contrib/admindocs/templates/admin_doc/bookmarklets.html:19
+msgid ""
+"Jumps you from any page to the documentation for the view that generates "
+"that page."
+msgstr ""
+
+#: contrib/admindocs/templates/admin_doc/bookmarklets.html:21
+msgid "Show object ID"
+msgstr ""
+
+#: contrib/admindocs/templates/admin_doc/bookmarklets.html:22
+msgid ""
+"Shows the content-type and unique ID for pages that represent a single "
+"object."
+msgstr ""
+
+#: contrib/admindocs/templates/admin_doc/bookmarklets.html:24
+msgid "Edit this object (current window)"
+msgstr ""
+
+#: contrib/admindocs/templates/admin_doc/bookmarklets.html:25
+msgid "Jumps to the admin page for pages that represent a single object."
+msgstr ""
+
+#: contrib/admindocs/templates/admin_doc/bookmarklets.html:27
+msgid "Edit this object (new window)"
+msgstr ""
+
+#: contrib/admindocs/templates/admin_doc/bookmarklets.html:28
+msgid "As above, but opens the admin page in a new window."
+msgstr ""
+
+#: contrib/auth/admin.py:29
+msgid "Personal info"
+msgstr ""
+
+#: contrib/auth/admin.py:30
+msgid "Permissions"
+msgstr ""
+
+#: contrib/auth/admin.py:31
+msgid "Important dates"
+msgstr ""
+
+#: contrib/auth/admin.py:32
+msgid "Groups"
+msgstr ""
+
+#: contrib/auth/admin.py:114
+msgid "Password changed successfully."
+msgstr ""
+
+#: contrib/auth/admin.py:124
+#, python-format
+msgid "Change password: %s"
+msgstr ""
+
+#: contrib/auth/forms.py:14 contrib/auth/forms.py:48 contrib/auth/forms.py:60
+msgid "Username"
+msgstr ""
+
+#: contrib/auth/forms.py:15 contrib/auth/forms.py:49
+msgid "Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only."
+msgstr ""
+
+#: contrib/auth/forms.py:16 contrib/auth/forms.py:50
+msgid "This value may contain only letters, numbers and @/./+/-/_ characters."
+msgstr ""
+
+#: contrib/auth/forms.py:18
+msgid "Password confirmation"
+msgstr ""
+
+#: contrib/auth/forms.py:31
+msgid "A user with that username already exists."
+msgstr ""
+
+#: contrib/auth/forms.py:37 contrib/auth/forms.py:156
+#: contrib/auth/forms.py:198
+msgid "The two password fields didn't match."
+msgstr ""
+
+#: contrib/auth/forms.py:83
+msgid "This account is inactive."
+msgstr ""
+
+#: contrib/auth/forms.py:88
+msgid ""
+"Your Web browser doesn't appear to have cookies enabled. Cookies are "
+"required for logging in."
+msgstr ""
+
+#: contrib/auth/forms.py:101
+msgid "E-mail"
+msgstr ""
+
+#: contrib/auth/forms.py:110
+msgid ""
+"That e-mail address doesn't have an associated user account. Are you sure "
+"you've registered?"
+msgstr ""
+
+#: contrib/auth/forms.py:136
+#, python-format
+msgid "Password reset on %s"
+msgstr ""
+
+#: contrib/auth/forms.py:145
+msgid "New password confirmation"
+msgstr ""
+
+#: contrib/auth/forms.py:178
+msgid "Your old password was entered incorrectly. Please enter it again."
+msgstr ""
+
+#: contrib/auth/models.py:66 contrib/auth/models.py:94
+msgid "name"
+msgstr ""
+
+#: contrib/auth/models.py:68
+msgid "codename"
+msgstr ""
+
+#: contrib/auth/models.py:72
+msgid "permission"
+msgstr ""
+
+#: contrib/auth/models.py:73 contrib/auth/models.py:95
+msgid "permissions"
+msgstr ""
+
+#: contrib/auth/models.py:98
+msgid "group"
+msgstr ""
+
+#: contrib/auth/models.py:99 contrib/auth/models.py:206
+msgid "groups"
+msgstr ""
+
+#: contrib/auth/models.py:196
+msgid "username"
+msgstr ""
+
+#: contrib/auth/models.py:196
+msgid ""
+"Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"
+msgstr ""
+
+#: contrib/auth/models.py:197
+msgid "first name"
+msgstr ""
+
+#: contrib/auth/models.py:198
+msgid "last name"
+msgstr ""
+
+#: contrib/auth/models.py:199
+msgid "e-mail address"
+msgstr ""
+
+#: contrib/auth/models.py:200
+msgid "password"
+msgstr ""
+
+#: contrib/auth/models.py:200
+msgid ""
+"Use '[algo]$[salt]$[hexdigest]' or use the <a href=\"password/\">change "
+"password form</a>."
+msgstr ""
+
+#: contrib/auth/models.py:201
+msgid "staff status"
+msgstr ""
+
+#: contrib/auth/models.py:201
+msgid "Designates whether the user can log into this admin site."
+msgstr ""
+
+#: contrib/auth/models.py:202
+msgid "active"
+msgstr ""
+
+#: contrib/auth/models.py:202
+msgid ""
+"Designates whether this user should be treated as active. Unselect this "
+"instead of deleting accounts."
+msgstr ""
+
+#: contrib/auth/models.py:203
+msgid "superuser status"
+msgstr ""
+
+#: contrib/auth/models.py:203
+msgid ""
+"Designates that this user has all permissions without explicitly assigning "
+"them."
+msgstr ""
+
+#: contrib/auth/models.py:204
+msgid "last login"
+msgstr ""
+
+#: contrib/auth/models.py:205
+msgid "date joined"
+msgstr ""
+
+#: contrib/auth/models.py:207
+msgid ""
+"In addition to the permissions manually assigned, this user will also get "
+"all permissions granted to each group he/she is in."
+msgstr ""
+
+#: contrib/auth/models.py:208
+msgid "user permissions"
+msgstr ""
+
+#: contrib/auth/models.py:212 contrib/comments/models.py:50
+#: contrib/comments/models.py:168
+msgid "user"
+msgstr ""
+
+#: contrib/auth/models.py:213
+msgid "users"
+msgstr ""
+
+#: contrib/auth/models.py:394
+msgid "message"
+msgstr ""
+
+#: contrib/auth/views.py:79
+msgid "Logged out"
+msgstr ""
+
+#: contrib/auth/management/commands/createsuperuser.py:23
+#: core/validators.py:120 forms/fields.py:428
+msgid "Enter a valid e-mail address."
+msgstr ""
+
+#: contrib/comments/admin.py:12
+msgid "Content"
+msgstr ""
+
+#: contrib/comments/admin.py:15
+msgid "Metadata"
+msgstr ""
+
+#: contrib/comments/admin.py:40
+msgid "flagged"
+msgid_plural "flagged"
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/comments/admin.py:41
+msgid "Flag selected comments"
+msgstr ""
+
+#: contrib/comments/admin.py:45
+msgid "approved"
+msgid_plural "approved"
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/comments/admin.py:46
+msgid "Approve selected comments"
+msgstr ""
+
+#: contrib/comments/admin.py:50
+msgid "removed"
+msgid_plural "removed"
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/comments/admin.py:51
+msgid "Remove selected comments"
+msgstr ""
+
+#: contrib/comments/admin.py:63
+#, python-format
+msgid "1 comment was successfully %(action)s."
+msgid_plural "%(count)s comments were successfully %(action)s."
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/comments/feeds.py:13
+#, python-format
+msgid "%(site_name)s comments"
+msgstr ""
+
+#: contrib/comments/feeds.py:23
+#, python-format
+msgid "Latest comments on %(site_name)s"
+msgstr ""
+
+#: contrib/comments/forms.py:93
+msgid "Name"
+msgstr ""
+
+#: contrib/comments/forms.py:94
+msgid "Email address"
+msgstr ""
+
+#: contrib/comments/forms.py:95 contrib/flatpages/admin.py:8
+#: contrib/flatpages/models.py:7 db/models/fields/__init__.py:1101
+msgid "URL"
+msgstr ""
+
+#: contrib/comments/forms.py:96
+msgid "Comment"
+msgstr ""
+
+#: contrib/comments/forms.py:175
+#, python-format
+msgid "Watch your mouth! The word %s is not allowed here."
+msgid_plural "Watch your mouth! The words %s are not allowed here."
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/comments/forms.py:182
+msgid ""
+"If you enter anything in this field your comment will be treated as spam"
+msgstr ""
+
+#: contrib/comments/models.py:22 contrib/contenttypes/models.py:81
+msgid "content type"
+msgstr ""
+
+#: contrib/comments/models.py:24
+msgid "object ID"
+msgstr ""
+
+#: contrib/comments/models.py:52
+msgid "user's name"
+msgstr ""
+
+#: contrib/comments/models.py:53
+msgid "user's email address"
+msgstr ""
+
+#: contrib/comments/models.py:54
+msgid "user's URL"
+msgstr ""
+
+#: contrib/comments/models.py:56 contrib/comments/models.py:76
+#: contrib/comments/models.py:169
+msgid "comment"
+msgstr ""
+
+#: contrib/comments/models.py:59
+msgid "date/time submitted"
+msgstr ""
+
+#: contrib/comments/models.py:60 db/models/fields/__init__.py:896
+msgid "IP address"
+msgstr ""
+
+#: contrib/comments/models.py:61
+msgid "is public"
+msgstr ""
+
+#: contrib/comments/models.py:62
+msgid ""
+"Uncheck this box to make the comment effectively disappear from the site."
+msgstr ""
+
+#: contrib/comments/models.py:64
+msgid "is removed"
+msgstr ""
+
+#: contrib/comments/models.py:65
+msgid ""
+"Check this box if the comment is inappropriate. A \"This comment has been "
+"removed\" message will be displayed instead."
+msgstr ""
+
+#: contrib/comments/models.py:77
+msgid "comments"
+msgstr ""
+
+#: contrib/comments/models.py:119
+msgid ""
+"This comment was posted by an authenticated user and thus the name is read-"
+"only."
+msgstr ""
+
+#: contrib/comments/models.py:128
+msgid ""
+"This comment was posted by an authenticated user and thus the email is read-"
+"only."
+msgstr ""
+
+#: contrib/comments/models.py:153
+#, python-format
+msgid ""
+"Posted by %(user)s at %(date)s\n"
+"\n"
+"%(comment)s\n"
+"\n"
+"http://%(domain)s%(url)s"
+msgstr ""
+
+#: contrib/comments/models.py:170
+msgid "flag"
+msgstr ""
+
+#: contrib/comments/models.py:171
+msgid "date"
+msgstr ""
+
+#: contrib/comments/models.py:181
+msgid "comment flag"
+msgstr ""
+
+#: contrib/comments/models.py:182
+msgid "comment flags"
+msgstr ""
+
+#: contrib/comments/templates/comments/approve.html:4
+msgid "Approve a comment"
+msgstr ""
+
+#: contrib/comments/templates/comments/approve.html:7
+msgid "Really make this comment public?"
+msgstr ""
+
+#: contrib/comments/templates/comments/approve.html:12
+msgid "Approve"
+msgstr ""
+
+#: contrib/comments/templates/comments/approved.html:4
+msgid "Thanks for approving"
+msgstr ""
+
+#: contrib/comments/templates/comments/approved.html:7
+#: contrib/comments/templates/comments/deleted.html:7
+#: contrib/comments/templates/comments/flagged.html:7
+msgid ""
+"Thanks for taking the time to improve the quality of discussion on our site"
+msgstr ""
+
+#: contrib/comments/templates/comments/delete.html:4
+msgid "Remove a comment"
+msgstr ""
+
+#: contrib/comments/templates/comments/delete.html:7
+msgid "Really remove this comment?"
+msgstr ""
+
+#: contrib/comments/templates/comments/deleted.html:4
+msgid "Thanks for removing"
+msgstr ""
+
+#: contrib/comments/templates/comments/flag.html:4
+msgid "Flag this comment"
+msgstr ""
+
+#: contrib/comments/templates/comments/flag.html:7
+msgid "Really flag this comment?"
+msgstr ""
+
+#: contrib/comments/templates/comments/flag.html:12
+msgid "Flag"
+msgstr ""
+
+#: contrib/comments/templates/comments/flagged.html:4
+msgid "Thanks for flagging"
+msgstr ""
+
+#: contrib/comments/templates/comments/form.html:17
+#: contrib/comments/templates/comments/preview.html:32
+msgid "Post"
+msgstr ""
+
+#: contrib/comments/templates/comments/form.html:18
+#: contrib/comments/templates/comments/preview.html:33
+msgid "Preview"
+msgstr ""
+
+#: contrib/comments/templates/comments/posted.html:4
+msgid "Thanks for commenting"
+msgstr ""
+
+#: contrib/comments/templates/comments/posted.html:7
+msgid "Thank you for your comment"
+msgstr ""
+
+#: contrib/comments/templates/comments/preview.html:4
+#: contrib/comments/templates/comments/preview.html:13
+msgid "Preview your comment"
+msgstr ""
+
+#: contrib/comments/templates/comments/preview.html:11
+msgid "Please correct the error below"
+msgid_plural "Please correct the errors below"
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/comments/templates/comments/preview.html:16
+msgid "Post your comment"
+msgstr ""
+
+#: contrib/comments/templates/comments/preview.html:16
+msgid "or make changes"
+msgstr ""
+
+#: contrib/contenttypes/models.py:77
+msgid "python model class name"
+msgstr ""
+
+#: contrib/contenttypes/models.py:82
+msgid "content types"
+msgstr ""
+
+#: contrib/flatpages/admin.py:9
+msgid ""
+"Example: '/about/contact/'. Make sure to have leading and trailing slashes."
+msgstr ""
+
+#: contrib/flatpages/admin.py:11
+msgid ""
+"This value must contain only letters, numbers, underscores, dashes or "
+"slashes."
+msgstr ""
+
+#: contrib/flatpages/admin.py:22
+msgid "Advanced options"
+msgstr ""
+
+#: contrib/flatpages/models.py:8
+msgid "title"
+msgstr ""
+
+#: contrib/flatpages/models.py:9
+msgid "content"
+msgstr ""
+
+#: contrib/flatpages/models.py:10
+msgid "enable comments"
+msgstr ""
+
+#: contrib/flatpages/models.py:11
+msgid "template name"
+msgstr ""
+
+#: contrib/flatpages/models.py:12
+msgid ""
+"Example: 'flatpages/contact_page.html'. If this isn't provided, the system "
+"will use 'flatpages/default.html'."
+msgstr ""
+
+#: contrib/flatpages/models.py:13
+msgid "registration required"
+msgstr ""
+
+#: contrib/flatpages/models.py:13
+msgid "If this is checked, only logged-in users will be able to view the page."
+msgstr ""
+
+#: contrib/flatpages/models.py:18
+msgid "flat page"
+msgstr ""
+
+#: contrib/flatpages/models.py:19
+msgid "flat pages"
+msgstr ""
+
+#: contrib/formtools/wizard.py:140
+msgid ""
+"We apologize, but your form has expired. Please continue filling out the "
+"form from this page."
+msgstr ""
+
+#: contrib/gis/db/models/fields.py:50
+msgid "The base GIS field -- maps to the OpenGIS Specification Geometry type."
+msgstr ""
+
+#: contrib/gis/db/models/fields.py:270
+msgid "Point"
+msgstr ""
+
+#: contrib/gis/db/models/fields.py:274
+msgid "Line string"
+msgstr ""
+
+#: contrib/gis/db/models/fields.py:278
+msgid "Polygon"
+msgstr ""
+
+#: contrib/gis/db/models/fields.py:282
+msgid "Multi-point"
+msgstr ""
+
+#: contrib/gis/db/models/fields.py:286
+msgid "Multi-line string"
+msgstr ""
+
+#: contrib/gis/db/models/fields.py:290
+msgid "Multi polygon"
+msgstr ""
+
+#: contrib/gis/db/models/fields.py:294
+msgid "Geometry collection"
+msgstr ""
+
+#: contrib/gis/forms/fields.py:17
+msgid "No geometry value provided."
+msgstr ""
+
+#: contrib/gis/forms/fields.py:18
+msgid "Invalid geometry value."
+msgstr ""
+
+#: contrib/gis/forms/fields.py:19
+msgid "Invalid geometry type."
+msgstr ""
+
+#: contrib/gis/forms/fields.py:20
+msgid ""
+"An error occurred when transforming the geometry to the SRID of the geometry "
+"form field."
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:19
+msgid "th"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:19
+msgid "st"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:19
+msgid "nd"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:19
+msgid "rd"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:51
+#, python-format
+msgid "%(value).1f million"
+msgid_plural "%(value).1f million"
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/humanize/templatetags/humanize.py:54
+#, python-format
+msgid "%(value).1f billion"
+msgid_plural "%(value).1f billion"
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/humanize/templatetags/humanize.py:57
+#, python-format
+msgid "%(value).1f trillion"
+msgid_plural "%(value).1f trillion"
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/humanize/templatetags/humanize.py:73
+msgid "one"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:73
+msgid "two"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:73
+msgid "three"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:73
+msgid "four"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:73
+msgid "five"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:73
+msgid "six"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:73
+msgid "seven"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:73
+msgid "eight"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:73
+msgid "nine"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:93
+msgid "today"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:95
+msgid "tomorrow"
+msgstr ""
+
+#: contrib/humanize/templatetags/humanize.py:97
+msgid "yesterday"
+msgstr ""
+
+#: contrib/localflavor/ar/forms.py:28
+msgid "Enter a postal code in the format NNNN or ANNNNAAA."
+msgstr ""
+
+#: contrib/localflavor/ar/forms.py:50 contrib/localflavor/br/forms.py:92
+#: contrib/localflavor/br/forms.py:131 contrib/localflavor/pe/forms.py:24
+#: contrib/localflavor/pe/forms.py:52
+msgid "This field requires only numbers."
+msgstr ""
+
+#: contrib/localflavor/ar/forms.py:51
+msgid "This field requires 7 or 8 digits."
+msgstr ""
+
+#: contrib/localflavor/ar/forms.py:80
+msgid "Enter a valid CUIT in XX-XXXXXXXX-X or XXXXXXXXXXXX format."
+msgstr ""
+
+#: contrib/localflavor/ar/forms.py:81
+msgid "Invalid CUIT."
+msgstr ""
+
+#: contrib/localflavor/at/at_states.py:5
+msgid "Burgenland"
+msgstr ""
+
+#: contrib/localflavor/at/at_states.py:6
+msgid "Carinthia"
+msgstr ""
+
+#: contrib/localflavor/at/at_states.py:7
+msgid "Lower Austria"
+msgstr ""
+
+#: contrib/localflavor/at/at_states.py:8
+msgid "Upper Austria"
+msgstr ""
+
+#: contrib/localflavor/at/at_states.py:9
+msgid "Salzburg"
+msgstr ""
+
+#: contrib/localflavor/at/at_states.py:10
+msgid "Styria"
+msgstr ""
+
+#: contrib/localflavor/at/at_states.py:11
+msgid "Tyrol"
+msgstr ""
+
+#: contrib/localflavor/at/at_states.py:12
+msgid "Vorarlberg"
+msgstr ""
+
+#: contrib/localflavor/at/at_states.py:13
+msgid "Vienna"
+msgstr ""
+
+#: contrib/localflavor/at/forms.py:20 contrib/localflavor/ch/forms.py:17
+#: contrib/localflavor/no/forms.py:13
+msgid "Enter a zip code in the format XXXX."
+msgstr ""
+
+#: contrib/localflavor/at/forms.py:48
+msgid "Enter a valid Austrian Social Security Number in XXXX XXXXXX format."
+msgstr ""
+
+#: contrib/localflavor/au/forms.py:17
+msgid "Enter a 4 digit post code."
+msgstr ""
+
+#: contrib/localflavor/br/forms.py:17
+msgid "Enter a zip code in the format XXXXX-XXX."
+msgstr ""
+
+#: contrib/localflavor/br/forms.py:26
+msgid "Phone numbers must be in XX-XXXX-XXXX format."
+msgstr ""
+
+#: contrib/localflavor/br/forms.py:54
+msgid ""
+"Select a valid brazilian state. That state is not one of the available "
+"states."
+msgstr ""
+
+#: contrib/localflavor/br/forms.py:90
+msgid "Invalid CPF number."
+msgstr ""
+
+#: contrib/localflavor/br/forms.py:91
+msgid "This field requires at most 11 digits or 14 characters."
+msgstr ""
+
+#: contrib/localflavor/br/forms.py:130
+msgid "Invalid CNPJ number."
+msgstr ""
+
+#: contrib/localflavor/br/forms.py:132
+msgid "This field requires at least 14 digits"
+msgstr ""
+
+#: contrib/localflavor/ca/forms.py:25
+msgid "Enter a postal code in the format XXX XXX."
+msgstr ""
+
+#: contrib/localflavor/ca/forms.py:96
+msgid "Enter a valid Canadian Social Insurance number in XXX-XXX-XXX format."
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:5
+msgid "Aargau"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:6
+msgid "Appenzell Innerrhoden"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:7
+msgid "Appenzell Ausserrhoden"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:8
+msgid "Basel-Stadt"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:9
+msgid "Basel-Land"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:10
+msgid "Berne"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:11
+msgid "Fribourg"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:12
+msgid "Geneva"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:13
+msgid "Glarus"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:14
+msgid "Graubuenden"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:15
+msgid "Jura"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:16
+msgid "Lucerne"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:17
+msgid "Neuchatel"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:18
+msgid "Nidwalden"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:19
+msgid "Obwalden"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:20
+msgid "Schaffhausen"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:21
+msgid "Schwyz"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:22
+msgid "Solothurn"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:23
+msgid "St. Gallen"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:24
+msgid "Thurgau"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:25
+msgid "Ticino"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:26
+msgid "Uri"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:27
+msgid "Valais"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:28
+msgid "Vaud"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:29
+msgid "Zug"
+msgstr ""
+
+#: contrib/localflavor/ch/ch_states.py:30
+msgid "Zurich"
+msgstr ""
+
+#: contrib/localflavor/ch/forms.py:65
+msgid ""
+"Enter a valid Swiss identity or passport card number in X1234567<0 or "
+"1234567890 format."
+msgstr ""
+
+#: contrib/localflavor/cl/forms.py:30
+msgid "Enter a valid Chilean RUT."
+msgstr ""
+
+#: contrib/localflavor/cl/forms.py:31
+msgid "Enter a valid Chilean RUT. The format is XX.XXX.XXX-X."
+msgstr ""
+
+#: contrib/localflavor/cl/forms.py:32
+msgid "The Chilean RUT is not valid."
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:8
+msgid "Prague"
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:9
+msgid "Central Bohemian Region"
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:10
+msgid "South Bohemian Region"
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:11
+msgid "Pilsen Region"
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:12
+msgid "Carlsbad Region"
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:13
+msgid "Usti Region"
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:14
+msgid "Liberec Region"
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:15
+msgid "Hradec Region"
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:16
+msgid "Pardubice Region"
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:17
+msgid "Vysocina Region"
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:18
+msgid "South Moravian Region"
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:19
+msgid "Olomouc Region"
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:20
+msgid "Zlin Region"
+msgstr ""
+
+#: contrib/localflavor/cz/cz_regions.py:21
+msgid "Moravian-Silesian Region"
+msgstr ""
+
+#: contrib/localflavor/cz/forms.py:28 contrib/localflavor/sk/forms.py:30
+msgid "Enter a postal code in the format XXXXX or XXX XX."
+msgstr ""
+
+#: contrib/localflavor/cz/forms.py:48
+msgid "Enter a birth number in the format XXXXXX/XXXX or XXXXXXXXXX."
+msgstr ""
+
+#: contrib/localflavor/cz/forms.py:49
+msgid "Invalid optional parameter Gender, valid values are 'f' and 'm'"
+msgstr ""
+
+#: contrib/localflavor/cz/forms.py:50
+msgid "Enter a valid birth number."
+msgstr ""
+
+#: contrib/localflavor/cz/forms.py:107
+msgid "Enter a valid IC number."
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:5
+msgid "Baden-Wuerttemberg"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:6
+msgid "Bavaria"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:7
+msgid "Berlin"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:8
+msgid "Brandenburg"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:9
+msgid "Bremen"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:10
+msgid "Hamburg"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:11
+msgid "Hessen"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:12
+msgid "Mecklenburg-Western Pomerania"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:13
+msgid "Lower Saxony"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:14
+msgid "North Rhine-Westphalia"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:15
+msgid "Rhineland-Palatinate"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:16
+msgid "Saarland"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:17
+msgid "Saxony"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:18
+msgid "Saxony-Anhalt"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:19
+msgid "Schleswig-Holstein"
+msgstr ""
+
+#: contrib/localflavor/de/de_states.py:20
+msgid "Thuringia"
+msgstr ""
+
+#: contrib/localflavor/de/forms.py:15 contrib/localflavor/fi/forms.py:13
+#: contrib/localflavor/fr/forms.py:16
+msgid "Enter a zip code in the format XXXXX."
+msgstr ""
+
+#: contrib/localflavor/de/forms.py:42
+msgid ""
+"Enter a valid German identity card number in XXXXXXXXXXX-XXXXXXX-XXXXXXX-X "
+"format."
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:5
+msgid "Arava"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:6
+msgid "Albacete"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:7
+msgid "Alacant"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:8
+msgid "Almeria"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:9
+msgid "Avila"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:10
+msgid "Badajoz"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:11
+msgid "Illes Balears"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:12
+msgid "Barcelona"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:13
+msgid "Burgos"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:14
+msgid "Caceres"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:15
+msgid "Cadiz"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:16
+msgid "Castello"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:17
+msgid "Ciudad Real"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:18
+msgid "Cordoba"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:19
+msgid "A Coruna"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:20
+msgid "Cuenca"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:21
+msgid "Girona"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:22
+msgid "Granada"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:23
+msgid "Guadalajara"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:24
+msgid "Guipuzkoa"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:25
+msgid "Huelva"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:26
+msgid "Huesca"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:27
+msgid "Jaen"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:28
+msgid "Leon"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:29
+msgid "Lleida"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:30
+#: contrib/localflavor/es/es_regions.py:17
+msgid "La Rioja"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:31
+msgid "Lugo"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:32
+#: contrib/localflavor/es/es_regions.py:18
+msgid "Madrid"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:33
+msgid "Malaga"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:34
+msgid "Murcia"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:35
+msgid "Navarre"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:36
+msgid "Ourense"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:37
+msgid "Asturias"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:38
+msgid "Palencia"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:39
+msgid "Las Palmas"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:40
+msgid "Pontevedra"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:41
+msgid "Salamanca"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:42
+msgid "Santa Cruz de Tenerife"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:43
+#: contrib/localflavor/es/es_regions.py:11
+msgid "Cantabria"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:44
+msgid "Segovia"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:45
+msgid "Seville"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:46
+msgid "Soria"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:47
+msgid "Tarragona"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:48
+msgid "Teruel"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:49
+msgid "Toledo"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:50
+msgid "Valencia"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:51
+msgid "Valladolid"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:52
+msgid "Bizkaia"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:53
+msgid "Zamora"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:54
+msgid "Zaragoza"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:55
+msgid "Ceuta"
+msgstr ""
+
+#: contrib/localflavor/es/es_provinces.py:56
+msgid "Melilla"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:5
+msgid "Andalusia"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:6
+msgid "Aragon"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:7
+msgid "Principality of Asturias"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:8
+msgid "Balearic Islands"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:9
+msgid "Basque Country"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:10
+msgid "Canary Islands"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:12
+msgid "Castile-La Mancha"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:13
+msgid "Castile and Leon"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:14
+msgid "Catalonia"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:15
+msgid "Extremadura"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:16
+msgid "Galicia"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:19
+msgid "Region of Murcia"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:20
+msgid "Foral Community of Navarre"
+msgstr ""
+
+#: contrib/localflavor/es/es_regions.py:21
+msgid "Valencian Community"
+msgstr ""
+
+#: contrib/localflavor/es/forms.py:20
+msgid "Enter a valid postal code in the range and format 01XXX - 52XXX."
+msgstr ""
+
+#: contrib/localflavor/es/forms.py:40
+msgid ""
+"Enter a valid phone number in one of the formats 6XXXXXXXX, 8XXXXXXXX or "
+"9XXXXXXXX."
+msgstr ""
+
+#: contrib/localflavor/es/forms.py:67
+msgid "Please enter a valid NIF, NIE, or CIF."
+msgstr ""
+
+#: contrib/localflavor/es/forms.py:68
+msgid "Please enter a valid NIF or NIE."
+msgstr ""
+
+#: contrib/localflavor/es/forms.py:69
+msgid "Invalid checksum for NIF."
+msgstr ""
+
+#: contrib/localflavor/es/forms.py:70
+msgid "Invalid checksum for NIE."
+msgstr ""
+
+#: contrib/localflavor/es/forms.py:71
+msgid "Invalid checksum for CIF."
+msgstr ""
+
+#: contrib/localflavor/es/forms.py:143
+msgid ""
+"Please enter a valid bank account number in format XXXX-XXXX-XX-XXXXXXXXXX."
+msgstr ""
+
+#: contrib/localflavor/es/forms.py:144
+msgid "Invalid checksum for bank account number."
+msgstr ""
+
+#: contrib/localflavor/fi/forms.py:29
+msgid "Enter a valid Finnish social security number."
+msgstr ""
+
+#: contrib/localflavor/fr/forms.py:31
+msgid "Phone numbers must be in 0X XX XX XX XX format."
+msgstr ""
+
+#: contrib/localflavor/id/forms.py:28
+msgid "Enter a valid post code"
+msgstr ""
+
+#: contrib/localflavor/id/forms.py:68 contrib/localflavor/nl/forms.py:53
+msgid "Enter a valid phone number"
+msgstr ""
+
+#: contrib/localflavor/id/forms.py:107
+msgid "Enter a valid vehicle license plate number"
+msgstr ""
+
+#: contrib/localflavor/id/forms.py:170
+msgid "Enter a valid NIK/KTP number"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:9
+#: contrib/localflavor/id/id_choices.py:73
+msgid "Bali"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:10
+#: contrib/localflavor/id/id_choices.py:45
+msgid "Banten"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:11
+#: contrib/localflavor/id/id_choices.py:54
+msgid "Bengkulu"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:12
+#: contrib/localflavor/id/id_choices.py:47
+msgid "Yogyakarta"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:13
+#: contrib/localflavor/id/id_choices.py:51
+msgid "Jakarta"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:14
+#: contrib/localflavor/id/id_choices.py:75
+msgid "Gorontalo"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:15
+#: contrib/localflavor/id/id_choices.py:57
+msgid "Jambi"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:16
+msgid "Jawa Barat"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:17
+msgid "Jawa Tengah"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:18
+msgid "Jawa Timur"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:19
+#: contrib/localflavor/id/id_choices.py:88
+msgid "Kalimantan Barat"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:20
+#: contrib/localflavor/id/id_choices.py:66
+msgid "Kalimantan Selatan"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:21
+#: contrib/localflavor/id/id_choices.py:89
+msgid "Kalimantan Tengah"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:22
+#: contrib/localflavor/id/id_choices.py:90
+msgid "Kalimantan Timur"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:23
+msgid "Kepulauan Bangka-Belitung"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:24
+#: contrib/localflavor/id/id_choices.py:62
+msgid "Kepulauan Riau"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:25
+#: contrib/localflavor/id/id_choices.py:55
+msgid "Lampung"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:26
+#: contrib/localflavor/id/id_choices.py:70
+msgid "Maluku"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:27
+#: contrib/localflavor/id/id_choices.py:71
+msgid "Maluku Utara"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:28
+#: contrib/localflavor/id/id_choices.py:59
+msgid "Nanggroe Aceh Darussalam"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:29
+msgid "Nusa Tenggara Barat"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:30
+msgid "Nusa Tenggara Timur"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:31
+msgid "Papua"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:32
+msgid "Papua Barat"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:33
+#: contrib/localflavor/id/id_choices.py:60
+msgid "Riau"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:34
+#: contrib/localflavor/id/id_choices.py:68
+msgid "Sulawesi Barat"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:35
+#: contrib/localflavor/id/id_choices.py:69
+msgid "Sulawesi Selatan"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:36
+#: contrib/localflavor/id/id_choices.py:76
+msgid "Sulawesi Tengah"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:37
+#: contrib/localflavor/id/id_choices.py:79
+msgid "Sulawesi Tenggara"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:38
+msgid "Sulawesi Utara"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:39
+#: contrib/localflavor/id/id_choices.py:52
+msgid "Sumatera Barat"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:40
+#: contrib/localflavor/id/id_choices.py:56
+msgid "Sumatera Selatan"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:41
+#: contrib/localflavor/id/id_choices.py:58
+msgid "Sumatera Utara"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:46
+msgid "Magelang"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:48
+msgid "Surakarta - Solo"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:49
+msgid "Madiun"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:50
+msgid "Kediri"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:53
+msgid "Tapanuli"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:61
+msgid "Kepulauan Bangka Belitung"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:63
+msgid "Corps Consulate"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:64
+msgid "Corps Diplomatic"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:65
+msgid "Bandung"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:67
+msgid "Sulawesi Utara Daratan"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:72
+msgid "NTT - Timor"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:74
+msgid "Sulawesi Utara Kepulauan"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:77
+msgid "NTB - Lombok"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:78
+msgid "Papua dan Papua Barat"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:80
+msgid "Cirebon"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:81
+msgid "NTB - Sumbawa"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:82
+msgid "NTT - Flores"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:83
+msgid "NTT - Sumba"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:84
+msgid "Bogor"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:85
+msgid "Pekalongan"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:86
+msgid "Semarang"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:87
+msgid "Pati"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:91
+msgid "Surabaya"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:92
+msgid "Madura"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:93
+msgid "Malang"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:94
+msgid "Jember"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:95
+msgid "Banyumas"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:96
+msgid "Federal Government"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:97
+msgid "Bojonegoro"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:98
+msgid "Purwakarta"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:99
+msgid "Sidoarjo"
+msgstr ""
+
+#: contrib/localflavor/id/id_choices.py:100
+msgid "Garut"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:8
+msgid "Antrim"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:9
+msgid "Armagh"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:10
+msgid "Carlow"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:11
+msgid "Cavan"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:12
+msgid "Clare"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:13
+msgid "Cork"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:14
+msgid "Derry"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:15
+msgid "Donegal"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:16
+msgid "Down"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:17
+msgid "Dublin"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:18
+msgid "Fermanagh"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:19
+msgid "Galway"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:20
+msgid "Kerry"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:21
+msgid "Kildare"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:22
+msgid "Kilkenny"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:23
+msgid "Laois"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:24
+msgid "Leitrim"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:25
+msgid "Limerick"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:26
+msgid "Longford"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:27
+msgid "Louth"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:28
+msgid "Mayo"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:29
+msgid "Meath"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:30
+msgid "Monaghan"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:31
+msgid "Offaly"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:32
+msgid "Roscommon"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:33
+msgid "Sligo"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:34
+msgid "Tipperary"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:35
+msgid "Tyrone"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:36
+msgid "Waterford"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:37
+msgid "Westmeath"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:38
+msgid "Wexford"
+msgstr ""
+
+#: contrib/localflavor/ie/ie_counties.py:39
+msgid "Wicklow"
+msgstr ""
+
+#: contrib/localflavor/in_/forms.py:15
+msgid "Enter a zip code in the format XXXXXXX."
+msgstr ""
+
+#: contrib/localflavor/is_/forms.py:18
+msgid ""
+"Enter a valid Icelandic identification number. The format is XXXXXX-XXXX."
+msgstr ""
+
+#: contrib/localflavor/is_/forms.py:19
+msgid "The Icelandic identification number is not valid."
+msgstr ""
+
+#: contrib/localflavor/it/forms.py:15
+msgid "Enter a valid zip code."
+msgstr ""
+
+#: contrib/localflavor/it/forms.py:44
+msgid "Enter a valid Social Security number."
+msgstr ""
+
+#: contrib/localflavor/it/forms.py:69
+msgid "Enter a valid VAT number."
+msgstr ""
+
+#: contrib/localflavor/jp/forms.py:16
+msgid "Enter a postal code in the format XXXXXXX or XXX-XXXX."
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:4
+msgid "Hokkaido"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:5
+msgid "Aomori"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:6
+msgid "Iwate"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:7
+msgid "Miyagi"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:8
+msgid "Akita"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:9
+msgid "Yamagata"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:10
+msgid "Fukushima"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:11
+msgid "Ibaraki"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:12
+msgid "Tochigi"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:13
+msgid "Gunma"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:14
+msgid "Saitama"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:15
+msgid "Chiba"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:16
+msgid "Tokyo"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:17
+msgid "Kanagawa"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:18
+msgid "Yamanashi"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:19
+msgid "Nagano"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:20
+msgid "Niigata"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:21
+msgid "Toyama"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:22
+msgid "Ishikawa"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:23
+msgid "Fukui"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:24
+msgid "Gifu"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:25
+msgid "Shizuoka"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:26
+msgid "Aichi"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:27
+msgid "Mie"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:28
+msgid "Shiga"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:29
+msgid "Kyoto"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:30
+msgid "Osaka"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:31
+msgid "Hyogo"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:32
+msgid "Nara"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:33
+msgid "Wakayama"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:34
+msgid "Tottori"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:35
+msgid "Shimane"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:36
+msgid "Okayama"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:37
+msgid "Hiroshima"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:38
+msgid "Yamaguchi"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:39
+msgid "Tokushima"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:40
+msgid "Kagawa"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:41
+msgid "Ehime"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:42
+msgid "Kochi"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:43
+msgid "Fukuoka"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:44
+msgid "Saga"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:45
+msgid "Nagasaki"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:46
+msgid "Kumamoto"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:47
+msgid "Oita"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:48
+msgid "Miyazaki"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:49
+msgid "Kagoshima"
+msgstr ""
+
+#: contrib/localflavor/jp/jp_prefectures.py:50
+msgid "Okinawa"
+msgstr ""
+
+#: contrib/localflavor/kw/forms.py:25
+msgid "Enter a valid Kuwaiti Civil ID number"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:12
+msgid "Aguascalientes"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:13
+msgid "Baja California"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:14
+msgid "Baja California Sur"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:15
+msgid "Campeche"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:16
+msgid "Chihuahua"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:17
+msgid "Chiapas"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:18
+msgid "Coahuila"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:19
+msgid "Colima"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:20
+msgid "Distrito Federal"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:21
+msgid "Durango"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:22
+msgid "Guerrero"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:23
+msgid "Guanajuato"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:24
+msgid "Hidalgo"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:25
+msgid "Jalisco"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:26
+msgid "Estado de México"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:27
+msgid "Michoacán"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:28
+msgid "Morelos"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:29
+msgid "Nayarit"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:30
+msgid "Nuevo León"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:31
+msgid "Oaxaca"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:32
+msgid "Puebla"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:33
+msgid "Querétaro"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:34
+msgid "Quintana Roo"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:35
+msgid "Sinaloa"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:36
+msgid "San Luis Potosí"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:37
+msgid "Sonora"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:38
+msgid "Tabasco"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:39
+msgid "Tamaulipas"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:40
+msgid "Tlaxcala"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:41
+msgid "Veracruz"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:42
+msgid "Yucatán"
+msgstr ""
+
+#: contrib/localflavor/mx/mx_states.py:43
+msgid "Zacatecas"
+msgstr ""
+
+#: contrib/localflavor/nl/forms.py:22
+msgid "Enter a valid postal code"
+msgstr ""
+
+#: contrib/localflavor/nl/forms.py:79
+msgid "Enter a valid SoFi number"
+msgstr ""
+
+#: contrib/localflavor/nl/nl_provinces.py:4
+msgid "Drenthe"
+msgstr ""
+
+#: contrib/localflavor/nl/nl_provinces.py:5
+msgid "Flevoland"
+msgstr ""
+
+#: contrib/localflavor/nl/nl_provinces.py:6
+msgid "Friesland"
+msgstr ""
+
+#: contrib/localflavor/nl/nl_provinces.py:7
+msgid "Gelderland"
+msgstr ""
+
+#: contrib/localflavor/nl/nl_provinces.py:8
+msgid "Groningen"
+msgstr ""
+
+#: contrib/localflavor/nl/nl_provinces.py:9
+msgid "Limburg"
+msgstr ""
+
+#: contrib/localflavor/nl/nl_provinces.py:10
+msgid "Noord-Brabant"
+msgstr ""
+
+#: contrib/localflavor/nl/nl_provinces.py:11
+msgid "Noord-Holland"
+msgstr ""
+
+#: contrib/localflavor/nl/nl_provinces.py:12
+msgid "Overijssel"
+msgstr ""
+
+#: contrib/localflavor/nl/nl_provinces.py:13
+msgid "Utrecht"
+msgstr ""
+
+#: contrib/localflavor/nl/nl_provinces.py:14
+msgid "Zeeland"
+msgstr ""
+
+#: contrib/localflavor/nl/nl_provinces.py:15
+msgid "Zuid-Holland"
+msgstr ""
+
+#: contrib/localflavor/no/forms.py:34
+msgid "Enter a valid Norwegian social security number."
+msgstr ""
+
+#: contrib/localflavor/pe/forms.py:25
+msgid "This field requires 8 digits."
+msgstr ""
+
+#: contrib/localflavor/pe/forms.py:53
+msgid "This field requires 11 digits."
+msgstr ""
+
+#: contrib/localflavor/pl/forms.py:38
+msgid "National Identification Number consists of 11 digits."
+msgstr ""
+
+#: contrib/localflavor/pl/forms.py:39
+msgid "Wrong checksum for the National Identification Number."
+msgstr ""
+
+#: contrib/localflavor/pl/forms.py:71
+msgid ""
+"Enter a tax number field (NIP) in the format XXX-XXX-XX-XX or XX-XX-XXX-XXX."
+msgstr ""
+
+#: contrib/localflavor/pl/forms.py:72
+msgid "Wrong checksum for the Tax Number (NIP)."
+msgstr ""
+
+#: contrib/localflavor/pl/forms.py:109
+msgid "National Business Register Number (REGON) consists of 9 or 14 digits."
+msgstr ""
+
+#: contrib/localflavor/pl/forms.py:110
+msgid "Wrong checksum for the National Business Register Number (REGON)."
+msgstr ""
+
+#: contrib/localflavor/pl/forms.py:148
+msgid "Enter a postal code in the format XX-XXX."
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:8
+msgid "Lower Silesia"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:9
+msgid "Kuyavia-Pomerania"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:10
+msgid "Lublin"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:11
+msgid "Lubusz"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:12
+msgid "Lodz"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:13
+msgid "Lesser Poland"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:14
+msgid "Masovia"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:15
+msgid "Opole"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:16
+msgid "Subcarpatia"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:17
+msgid "Podlasie"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:18
+msgid "Pomerania"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:19
+msgid "Silesia"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:20
+msgid "Swietokrzyskie"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:21
+msgid "Warmia-Masuria"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:22
+msgid "Greater Poland"
+msgstr ""
+
+#: contrib/localflavor/pl/pl_voivodeships.py:23
+msgid "West Pomerania"
+msgstr ""
+
+#: contrib/localflavor/pt/forms.py:17
+msgid "Enter a zip code in the format XXXX-XXX."
+msgstr ""
+
+#: contrib/localflavor/pt/forms.py:37
+msgid "Phone numbers must have 9 digits, or start by + or 00."
+msgstr ""
+
+#: contrib/localflavor/ro/forms.py:19
+msgid "Enter a valid CIF."
+msgstr ""
+
+#: contrib/localflavor/ro/forms.py:56
+msgid "Enter a valid CNP."
+msgstr ""
+
+#: contrib/localflavor/ro/forms.py:141
+msgid "Enter a valid IBAN in ROXX-XXXX-XXXX-XXXX-XXXX-XXXX format"
+msgstr ""
+
+#: contrib/localflavor/ro/forms.py:171
+msgid "Phone numbers must be in XXXX-XXXXXX format."
+msgstr ""
+
+#: contrib/localflavor/ro/forms.py:194
+msgid "Enter a valid postal code in the format XXXXXX"
+msgstr ""
+
+#: contrib/localflavor/se/forms.py:50
+msgid "Enter a valid Swedish organisation number."
+msgstr ""
+
+#: contrib/localflavor/se/forms.py:107
+msgid "Enter a valid Swedish personal identity number."
+msgstr ""
+
+#: contrib/localflavor/se/forms.py:108
+msgid "Co-ordination numbers are not allowed."
+msgstr ""
+
+#: contrib/localflavor/se/forms.py:150
+msgid "Enter a Swedish postal code in the format XXXXX."
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:15
+msgid "Stockholm"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:16
+msgid "Västerbotten"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:17
+msgid "Norrbotten"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:18
+msgid "Uppsala"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:19
+msgid "Södermanland"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:20
+msgid "Östergötland"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:21
+msgid "Jönköping"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:22
+msgid "Kronoberg"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:23
+msgid "Kalmar"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:24
+msgid "Gotland"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:25
+msgid "Blekinge"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:26
+msgid "Skåne"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:27
+msgid "Halland"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:28
+msgid "Västra Götaland"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:29
+msgid "Värmland"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:30
+msgid "Örebro"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:31
+msgid "Västmanland"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:32
+msgid "Dalarna"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:33
+msgid "Gävleborg"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:34
+msgid "Västernorrland"
+msgstr ""
+
+#: contrib/localflavor/se/se_counties.py:35
+msgid "Jämtland"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:8
+msgid "Banska Bystrica"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:9
+msgid "Banska Stiavnica"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:10
+msgid "Bardejov"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:11
+msgid "Banovce nad Bebravou"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:12
+msgid "Brezno"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:13
+msgid "Bratislava I"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:14
+msgid "Bratislava II"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:15
+msgid "Bratislava III"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:16
+msgid "Bratislava IV"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:17
+msgid "Bratislava V"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:18
+msgid "Bytca"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:19
+msgid "Cadca"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:20
+msgid "Detva"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:21
+msgid "Dolny Kubin"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:22
+msgid "Dunajska Streda"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:23
+msgid "Galanta"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:24
+msgid "Gelnica"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:25
+msgid "Hlohovec"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:26
+msgid "Humenne"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:27
+msgid "Ilava"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:28
+msgid "Kezmarok"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:29
+msgid "Komarno"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:30
+msgid "Kosice I"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:31
+msgid "Kosice II"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:32
+msgid "Kosice III"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:33
+msgid "Kosice IV"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:34
+msgid "Kosice - okolie"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:35
+msgid "Krupina"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:36
+msgid "Kysucke Nove Mesto"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:37
+msgid "Levice"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:38
+msgid "Levoca"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:39
+msgid "Liptovsky Mikulas"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:40
+msgid "Lucenec"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:41
+msgid "Malacky"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:42
+msgid "Martin"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:43
+msgid "Medzilaborce"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:44
+msgid "Michalovce"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:45
+msgid "Myjava"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:46
+msgid "Namestovo"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:47
+msgid "Nitra"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:48
+msgid "Nove Mesto nad Vahom"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:49
+msgid "Nove Zamky"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:50
+msgid "Partizanske"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:51
+msgid "Pezinok"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:52
+msgid "Piestany"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:53
+msgid "Poltar"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:54
+msgid "Poprad"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:55
+msgid "Povazska Bystrica"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:56
+msgid "Presov"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:57
+msgid "Prievidza"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:58
+msgid "Puchov"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:59
+msgid "Revuca"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:60
+msgid "Rimavska Sobota"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:61
+msgid "Roznava"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:62
+msgid "Ruzomberok"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:63
+msgid "Sabinov"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:64
+msgid "Senec"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:65
+msgid "Senica"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:66
+msgid "Skalica"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:67
+msgid "Snina"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:68
+msgid "Sobrance"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:69
+msgid "Spisska Nova Ves"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:70
+msgid "Stara Lubovna"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:71
+msgid "Stropkov"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:72
+msgid "Svidnik"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:73
+msgid "Sala"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:74
+msgid "Topolcany"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:75
+msgid "Trebisov"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:76
+msgid "Trencin"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:77
+msgid "Trnava"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:78
+msgid "Turcianske Teplice"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:79
+msgid "Tvrdosin"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:80
+msgid "Velky Krtis"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:81
+msgid "Vranov nad Toplou"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:82
+msgid "Zlate Moravce"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:83
+msgid "Zvolen"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:84
+msgid "Zarnovica"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:85
+msgid "Ziar nad Hronom"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_districts.py:86
+msgid "Zilina"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_regions.py:8
+msgid "Banska Bystrica region"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_regions.py:9
+msgid "Bratislava region"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_regions.py:10
+msgid "Kosice region"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_regions.py:11
+msgid "Nitra region"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_regions.py:12
+msgid "Presov region"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_regions.py:13
+msgid "Trencin region"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_regions.py:14
+msgid "Trnava region"
+msgstr ""
+
+#: contrib/localflavor/sk/sk_regions.py:15
+msgid "Zilina region"
+msgstr ""
+
+#: contrib/localflavor/uk/forms.py:21
+msgid "Enter a valid postcode."
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:11
+msgid "Bedfordshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:12
+msgid "Buckinghamshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:14
+msgid "Cheshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:15
+msgid "Cornwall and Isles of Scilly"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:16
+msgid "Cumbria"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:17
+msgid "Derbyshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:18
+msgid "Devon"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:19
+msgid "Dorset"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:20
+msgid "Durham"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:21
+msgid "East Sussex"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:22
+msgid "Essex"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:23
+msgid "Gloucestershire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:24
+msgid "Greater London"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:25
+msgid "Greater Manchester"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:26
+msgid "Hampshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:27
+msgid "Hertfordshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:28
+msgid "Kent"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:29
+msgid "Lancashire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:30
+msgid "Leicestershire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:31
+msgid "Lincolnshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:32
+msgid "Merseyside"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:33
+msgid "Norfolk"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:34
+msgid "North Yorkshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:35
+msgid "Northamptonshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:36
+msgid "Northumberland"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:37
+msgid "Nottinghamshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:38
+msgid "Oxfordshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:39
+msgid "Shropshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:40
+msgid "Somerset"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:41
+msgid "South Yorkshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:42
+msgid "Staffordshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:43
+msgid "Suffolk"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:44
+msgid "Surrey"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:45
+msgid "Tyne and Wear"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:46
+msgid "Warwickshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:47
+msgid "West Midlands"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:48
+msgid "West Sussex"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:49
+msgid "West Yorkshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:50
+msgid "Wiltshire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:51
+msgid "Worcestershire"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:55
+msgid "County Antrim"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:56
+msgid "County Armagh"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:57
+msgid "County Down"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:58
+msgid "County Fermanagh"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:59
+msgid "County Londonderry"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:60
+msgid "County Tyrone"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:64
+msgid "Clwyd"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:65
+msgid "Dyfed"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:66
+msgid "Gwent"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:67
+msgid "Gwynedd"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:68
+msgid "Mid Glamorgan"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:69
+msgid "Powys"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:70
+msgid "South Glamorgan"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:71
+msgid "West Glamorgan"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:75
+msgid "Borders"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:76
+msgid "Central Scotland"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:77
+msgid "Dumfries and Galloway"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:78
+msgid "Fife"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:79
+msgid "Grampian"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:80
+msgid "Highland"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:81
+msgid "Lothian"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:82
+msgid "Orkney Islands"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:83
+msgid "Shetland Islands"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:84
+msgid "Strathclyde"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:85
+msgid "Tayside"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:86
+msgid "Western Isles"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:90
+msgid "England"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:91
+msgid "Northern Ireland"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:92
+msgid "Scotland"
+msgstr ""
+
+#: contrib/localflavor/uk/uk_regions.py:93
+msgid "Wales"
+msgstr ""
+
+#: contrib/localflavor/us/forms.py:17
+msgid "Enter a zip code in the format XXXXX or XXXXX-XXXX."
+msgstr ""
+
+#: contrib/localflavor/us/forms.py:26
+msgid "Phone numbers must be in XXX-XXX-XXXX format."
+msgstr ""
+
+#: contrib/localflavor/us/forms.py:55
+msgid "Enter a valid U.S. Social Security number in XXX-XX-XXXX format."
+msgstr ""
+
+#: contrib/localflavor/us/forms.py:88
+msgid "Enter a U.S. state or territory."
+msgstr ""
+
+#: contrib/localflavor/us/models.py:8
+msgid "U.S. state (two uppercase letters)"
+msgstr ""
+
+#: contrib/localflavor/us/models.py:17
+msgid "Phone number"
+msgstr ""
+
+#: contrib/localflavor/uy/forms.py:28
+msgid "Enter a valid CI number in X.XXX.XXX-X,XXXXXXX-X or XXXXXXXX format."
+msgstr ""
+
+#: contrib/localflavor/uy/forms.py:30
+msgid "Enter a valid CI number."
+msgstr ""
+
+#: contrib/localflavor/za/forms.py:21
+msgid "Enter a valid South African ID number"
+msgstr ""
+
+#: contrib/localflavor/za/forms.py:55
+msgid "Enter a valid South African postal code"
+msgstr ""
+
+#: contrib/localflavor/za/za_provinces.py:4
+msgid "Eastern Cape"
+msgstr ""
+
+#: contrib/localflavor/za/za_provinces.py:5
+msgid "Free State"
+msgstr ""
+
+#: contrib/localflavor/za/za_provinces.py:6
+msgid "Gauteng"
+msgstr ""
+
+#: contrib/localflavor/za/za_provinces.py:7
+msgid "KwaZulu-Natal"
+msgstr ""
+
+#: contrib/localflavor/za/za_provinces.py:8
+msgid "Limpopo"
+msgstr ""
+
+#: contrib/localflavor/za/za_provinces.py:9
+msgid "Mpumalanga"
+msgstr ""
+
+#: contrib/localflavor/za/za_provinces.py:10
+msgid "Northern Cape"
+msgstr ""
+
+#: contrib/localflavor/za/za_provinces.py:11
+msgid "North West"
+msgstr ""
+
+#: contrib/localflavor/za/za_provinces.py:12
+msgid "Western Cape"
+msgstr ""
+
+#: contrib/messages/tests/base.py:101
+msgid "lazy message"
+msgstr ""
+
+#: contrib/redirects/models.py:7
+msgid "redirect from"
+msgstr ""
+
+#: contrib/redirects/models.py:8
+msgid ""
+"This should be an absolute path, excluding the domain name. Example: '/"
+"events/search/'."
+msgstr ""
+
+#: contrib/redirects/models.py:9
+msgid "redirect to"
+msgstr ""
+
+#: contrib/redirects/models.py:10
+msgid ""
+"This can be either an absolute path (as above) or a full URL starting with "
+"'http://'."
+msgstr ""
+
+#: contrib/redirects/models.py:13
+msgid "redirect"
+msgstr ""
+
+#: contrib/redirects/models.py:14
+msgid "redirects"
+msgstr ""
+
+#: contrib/sessions/models.py:45
+msgid "session key"
+msgstr ""
+
+#: contrib/sessions/models.py:47
+msgid "session data"
+msgstr ""
+
+#: contrib/sessions/models.py:48
+msgid "expire date"
+msgstr ""
+
+#: contrib/sessions/models.py:53
+msgid "session"
+msgstr ""
+
+#: contrib/sessions/models.py:54
+msgid "sessions"
+msgstr ""
+
+#: contrib/sites/models.py:32
+msgid "domain name"
+msgstr ""
+
+#: contrib/sites/models.py:33
+msgid "display name"
+msgstr ""
+
+#: contrib/sites/models.py:39
+msgid "sites"
+msgstr ""
+
+#: core/validators.py:20 forms/fields.py:66
+msgid "Enter a valid value."
+msgstr ""
+
+#: core/validators.py:87 forms/fields.py:529
+msgid "Enter a valid URL."
+msgstr ""
+
+#: core/validators.py:89 forms/fields.py:530
+msgid "This URL appears to be a broken link."
+msgstr ""
+
+#: core/validators.py:123 forms/fields.py:873
+msgid ""
+"Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."
+msgstr ""
+
+#: core/validators.py:126 forms/fields.py:866
+msgid "Enter a valid IPv4 address."
+msgstr ""
+
+#: core/validators.py:129 db/models/fields/__init__.py:572
+msgid "Enter only digits separated by commas."
+msgstr ""
+
+#: core/validators.py:135
+#, python-format
+msgid "Ensure this value is %(limit_value)s (it is %(show_value)s)."
+msgstr ""
+
+#: core/validators.py:153 forms/fields.py:205 forms/fields.py:257
+#, python-format
+msgid "Ensure this value is less than or equal to %(limit_value)s."
+msgstr ""
+
+#: core/validators.py:158 forms/fields.py:206 forms/fields.py:258
+#, python-format
+msgid "Ensure this value is greater than or equal to %(limit_value)s."
+msgstr ""
+
+#: core/validators.py:164
+#, python-format
+msgid ""
+"Ensure this value has at least %(limit_value)d characters (it has %"
+"(show_value)d)."
+msgstr ""
+
+#: core/validators.py:170
+#, python-format
+msgid ""
+"Ensure this value has at most %(limit_value)d characters (it has %"
+"(show_value)d)."
+msgstr ""
+
+#: db/models/base.py:823
+#, python-format
+msgid "%(field_name)s must be unique for %(date_field)s %(lookup)s."
+msgstr ""
+
+#: db/models/base.py:838 db/models/base.py:846
+#, python-format
+msgid "%(model_name)s with this %(field_label)s already exists."
+msgstr ""
+
+#: db/models/fields/__init__.py:63
+#, python-format
+msgid "Value %r is not a valid choice."
+msgstr ""
+
+#: db/models/fields/__init__.py:64
+msgid "This field cannot be null."
+msgstr ""
+
+#: db/models/fields/__init__.py:65
+msgid "This field cannot be blank."
+msgstr ""
+
+#: db/models/fields/__init__.py:70
+#, python-format
+msgid "Field of type: %(field_type)s"
+msgstr ""
+
+#: db/models/fields/__init__.py:451 db/models/fields/__init__.py:852
+#: db/models/fields/__init__.py:961 db/models/fields/__init__.py:972
+#: db/models/fields/__init__.py:999
+msgid "Integer"
+msgstr ""
+
+#: db/models/fields/__init__.py:455 db/models/fields/__init__.py:850
+msgid "This value must be an integer."
+msgstr ""
+
+#: db/models/fields/__init__.py:490
+msgid "This value must be either True or False."
+msgstr ""
+
+#: db/models/fields/__init__.py:492
+msgid "Boolean (Either True or False)"
+msgstr ""
+
+#: db/models/fields/__init__.py:539 db/models/fields/__init__.py:982
+#, python-format
+msgid "String (up to %(max_length)s)"
+msgstr ""
+
+#: db/models/fields/__init__.py:567
+msgid "Comma-separated integers"
+msgstr ""
+
+#: db/models/fields/__init__.py:581
+msgid "Date (without time)"
+msgstr ""
+
+#: db/models/fields/__init__.py:585
+msgid "Enter a valid date in YYYY-MM-DD format."
+msgstr ""
+
+#: db/models/fields/__init__.py:586
+#, python-format
+msgid "Invalid date: %s"
+msgstr ""
+
+#: db/models/fields/__init__.py:667
+msgid "Enter a valid date/time in YYYY-MM-DD HH:MM[:ss[.uuuuuu]] format."
+msgstr ""
+
+#: db/models/fields/__init__.py:669
+msgid "Date (with time)"
+msgstr ""
+
+#: db/models/fields/__init__.py:735
+msgid "This value must be a decimal number."
+msgstr ""
+
+#: db/models/fields/__init__.py:737
+msgid "Decimal number"
+msgstr ""
+
+#: db/models/fields/__init__.py:792
+msgid "E-mail address"
+msgstr ""
+
+#: db/models/fields/__init__.py:799 db/models/fields/files.py:220
+#: db/models/fields/files.py:331
+msgid "File path"
+msgstr ""
+
+#: db/models/fields/__init__.py:822
+msgid "This value must be a float."
+msgstr ""
+
+#: db/models/fields/__init__.py:824
+msgid "Floating point number"
+msgstr ""
+
+#: db/models/fields/__init__.py:883
+msgid "Big (8 byte) integer"
+msgstr ""
+
+#: db/models/fields/__init__.py:912
+msgid "This value must be either None, True or False."
+msgstr ""
+
+#: db/models/fields/__init__.py:914
+msgid "Boolean (Either True, False or None)"
+msgstr ""
+
+#: db/models/fields/__init__.py:1005
+msgid "Text"
+msgstr ""
+
+#: db/models/fields/__init__.py:1021
+msgid "Time"
+msgstr ""
+
+#: db/models/fields/__init__.py:1025
+msgid "Enter a valid time in HH:MM[:ss[.uuuuuu]] format."
+msgstr ""
+
+#: db/models/fields/__init__.py:1109
+msgid "XML text"
+msgstr ""
+
+#: db/models/fields/related.py:799
+#, python-format
+msgid "Model %(model)s with pk %(pk)r does not exist."
+msgstr ""
+
+#: db/models/fields/related.py:801
+msgid "Foreign Key (type determined by related field)"
+msgstr ""
+
+#: db/models/fields/related.py:918
+msgid "One-to-one relationship"
+msgstr ""
+
+#: db/models/fields/related.py:980
+msgid "Many-to-many relationship"
+msgstr ""
+
+#: db/models/fields/related.py:1000
+msgid ""
+"Hold down \"Control\", or \"Command\" on a Mac, to select more than one."
+msgstr ""
+
+#: db/models/fields/related.py:1061
+#, python-format
+msgid "Please enter valid %(self)s IDs. The value %(value)r is invalid."
+msgid_plural ""
+"Please enter valid %(self)s IDs. The values %(value)r are invalid."
+msgstr[0] ""
+msgstr[1] ""
+
+#: forms/fields.py:65
+msgid "This field is required."
+msgstr ""
+
+#: forms/fields.py:204
+msgid "Enter a whole number."
+msgstr ""
+
+#: forms/fields.py:235 forms/fields.py:256
+msgid "Enter a number."
+msgstr ""
+
+#: forms/fields.py:259
+#, python-format
+msgid "Ensure that there are no more than %s digits in total."
+msgstr ""
+
+#: forms/fields.py:260
+#, python-format
+msgid "Ensure that there are no more than %s decimal places."
+msgstr ""
+
+#: forms/fields.py:261
+#, python-format
+msgid "Ensure that there are no more than %s digits before the decimal point."
+msgstr ""
+
+#: forms/fields.py:323 forms/fields.py:838
+msgid "Enter a valid date."
+msgstr ""
+
+#: forms/fields.py:351 forms/fields.py:839
+msgid "Enter a valid time."
+msgstr ""
+
+#: forms/fields.py:377
+msgid "Enter a valid date/time."
+msgstr ""
+
+#: forms/fields.py:435
+msgid "No file was submitted. Check the encoding type on the form."
+msgstr ""
+
+#: forms/fields.py:436
+msgid "No file was submitted."
+msgstr ""
+
+#: forms/fields.py:437
+msgid "The submitted file is empty."
+msgstr ""
+
+#: forms/fields.py:438
+#, python-format
+msgid ""
+"Ensure this filename has at most %(max)d characters (it has %(length)d)."
+msgstr ""
+
+#: forms/fields.py:473
+msgid ""
+"Upload a valid image. The file you uploaded was either not an image or a "
+"corrupted image."
+msgstr ""
+
+#: forms/fields.py:596 forms/fields.py:671
+#, python-format
+msgid "Select a valid choice. %(value)s is not one of the available choices."
+msgstr ""
+
+#: forms/fields.py:672 forms/fields.py:734 forms/models.py:1002
+msgid "Enter a list of values."
+msgstr ""
+
+#: forms/formsets.py:298 forms/formsets.py:300
+msgid "Order"
+msgstr ""
+
+#: forms/models.py:562
+#, python-format
+msgid "Please correct the duplicate data for %(field)s."
+msgstr ""
+
+#: forms/models.py:566
+#, python-format
+msgid "Please correct the duplicate data for %(field)s, which must be unique."
+msgstr ""
+
+#: forms/models.py:572
+#, python-format
+msgid ""
+"Please correct the duplicate data for %(field_name)s which must be unique "
+"for the %(lookup)s in %(date_field)s."
+msgstr ""
+
+#: forms/models.py:580
+msgid "Please correct the duplicate values below."
+msgstr ""
+
+#: forms/models.py:855
+msgid "The inline foreign key did not match the parent instance primary key."
+msgstr ""
+
+#: forms/models.py:921
+msgid "Select a valid choice. That choice is not one of the available choices."
+msgstr ""
+
+#: forms/models.py:1003
+#, python-format
+msgid "Select a valid choice. %s is not one of the available choices."
+msgstr ""
+
+#: forms/models.py:1005
+#, python-format
+msgid "\"%s\" is not a valid value for a primary key."
+msgstr ""
+
+#: template/defaultfilters.py:776
+msgid "yes,no,maybe"
+msgstr ""
+
+#: template/defaultfilters.py:807
+#, python-format
+msgid "%(size)d byte"
+msgid_plural "%(size)d bytes"
+msgstr[0] ""
+msgstr[1] ""
+
+#: template/defaultfilters.py:809
+#, python-format
+msgid "%s KB"
+msgstr ""
+
+#: template/defaultfilters.py:811
+#, python-format
+msgid "%s MB"
+msgstr ""
+
+#: template/defaultfilters.py:812
+#, python-format
+msgid "%s GB"
+msgstr ""
+
+#: utils/dateformat.py:42
+msgid "p.m."
+msgstr ""
+
+#: utils/dateformat.py:43
+msgid "a.m."
+msgstr ""
+
+#: utils/dateformat.py:48
+msgid "PM"
+msgstr ""
+
+#: utils/dateformat.py:49
+msgid "AM"
+msgstr ""
+
+#: utils/dateformat.py:98
+msgid "midnight"
+msgstr ""
+
+#: utils/dateformat.py:100
+msgid "noon"
+msgstr ""
+
+#: utils/dates.py:6
+msgid "Monday"
+msgstr ""
+
+#: utils/dates.py:6
+msgid "Tuesday"
+msgstr ""
+
+#: utils/dates.py:6
+msgid "Wednesday"
+msgstr ""
+
+#: utils/dates.py:6
+msgid "Thursday"
+msgstr ""
+
+#: utils/dates.py:6
+msgid "Friday"
+msgstr ""
+
+#: utils/dates.py:7
+msgid "Saturday"
+msgstr ""
+
+#: utils/dates.py:7
+msgid "Sunday"
+msgstr ""
+
+#: utils/dates.py:10
+msgid "Mon"
+msgstr ""
+
+#: utils/dates.py:10
+msgid "Tue"
+msgstr ""
+
+#: utils/dates.py:10
+msgid "Wed"
+msgstr ""
+
+#: utils/dates.py:10
+msgid "Thu"
+msgstr ""
+
+#: utils/dates.py:10
+msgid "Fri"
+msgstr ""
+
+#: utils/dates.py:11
+msgid "Sat"
+msgstr ""
+
+#: utils/dates.py:11
+msgid "Sun"
+msgstr ""
+
+#: utils/dates.py:18
+msgid "January"
+msgstr ""
+
+#: utils/dates.py:18
+msgid "February"
+msgstr ""
+
+#: utils/dates.py:18 utils/dates.py:31
+msgid "March"
+msgstr ""
+
+#: utils/dates.py:18 utils/dates.py:31
+msgid "April"
+msgstr ""
+
+#: utils/dates.py:18 utils/dates.py:31
+msgid "May"
+msgstr ""
+
+#: utils/dates.py:18 utils/dates.py:31
+msgid "June"
+msgstr ""
+
+#: utils/dates.py:19 utils/dates.py:31
+msgid "July"
+msgstr ""
+
+#: utils/dates.py:19
+msgid "August"
+msgstr ""
+
+#: utils/dates.py:19
+msgid "September"
+msgstr ""
+
+#: utils/dates.py:19
+msgid "October"
+msgstr ""
+
+#: utils/dates.py:19
+msgid "November"
+msgstr ""
+
+#: utils/dates.py:20
+msgid "December"
+msgstr ""
+
+#: utils/dates.py:23
+msgid "jan"
+msgstr ""
+
+#: utils/dates.py:23
+msgid "feb"
+msgstr ""
+
+#: utils/dates.py:23
+msgid "mar"
+msgstr ""
+
+#: utils/dates.py:23
+msgid "apr"
+msgstr ""
+
+#: utils/dates.py:23
+msgid "may"
+msgstr ""
+
+#: utils/dates.py:23
+msgid "jun"
+msgstr ""
+
+#: utils/dates.py:24
+msgid "jul"
+msgstr ""
+
+#: utils/dates.py:24
+msgid "aug"
+msgstr ""
+
+#: utils/dates.py:24
+msgid "sep"
+msgstr ""
+
+#: utils/dates.py:24
+msgid "oct"
+msgstr ""
+
+#: utils/dates.py:24
+msgid "nov"
+msgstr ""
+
+#: utils/dates.py:24
+msgid "dec"
+msgstr ""
+
+#: utils/dates.py:31
+msgid "Jan."
+msgstr ""
+
+#: utils/dates.py:31
+msgid "Feb."
+msgstr ""
+
+#: utils/dates.py:32
+msgid "Aug."
+msgstr ""
+
+#: utils/dates.py:32
+msgid "Sept."
+msgstr ""
+
+#: utils/dates.py:32
+msgid "Oct."
+msgstr ""
+
+#: utils/dates.py:32
+msgid "Nov."
+msgstr ""
+
+#: utils/dates.py:32
+msgid "Dec."
+msgstr ""
+
+#: utils/text.py:130
+msgid "or"
+msgstr ""
+
+#: utils/timesince.py:21
+msgid "year"
+msgid_plural "years"
+msgstr[0] ""
+msgstr[1] ""
+
+#: utils/timesince.py:22
+msgid "month"
+msgid_plural "months"
+msgstr[0] ""
+msgstr[1] ""
+
+#: utils/timesince.py:23
+msgid "week"
+msgid_plural "weeks"
+msgstr[0] ""
+msgstr[1] ""
+
+#: utils/timesince.py:24
+msgid "day"
+msgid_plural "days"
+msgstr[0] ""
+msgstr[1] ""
+
+#: utils/timesince.py:25
+msgid "hour"
+msgid_plural "hours"
+msgstr[0] ""
+msgstr[1] ""
+
+#: utils/timesince.py:26
+msgid "minute"
+msgid_plural "minutes"
+msgstr[0] ""
+msgstr[1] ""
+
+#: utils/timesince.py:45
+msgid "minutes"
+msgstr ""
+
+#: utils/timesince.py:50
+#, python-format
+msgid "%(number)d %(type)s"
+msgstr ""
+
+#: utils/timesince.py:56
+#, python-format
+msgid ", %(number)d %(type)s"
+msgstr ""
+
+#: utils/translation/trans_real.py:519
+msgid "DATE_FORMAT"
+msgstr "N j, Y"
+
+#: utils/translation/trans_real.py:520
+msgid "DATETIME_FORMAT"
+msgstr "N j, Y, P"
+
+#: utils/translation/trans_real.py:521
+msgid "TIME_FORMAT"
+msgstr "P"
+
+#: utils/translation/trans_real.py:542
+msgid "YEAR_MONTH_FORMAT"
+msgstr "F Y"
+
+#: utils/translation/trans_real.py:543
+msgid "MONTH_DAY_FORMAT"
+msgstr "F j"
+
+#: views/generic/create_update.py:115
+#, python-format
+msgid "The %(verbose_name)s was created successfully."
+msgstr ""
+
+#: views/generic/create_update.py:158
+#, python-format
+msgid "The %(verbose_name)s was updated successfully."
+msgstr ""
+
+#: views/generic/create_update.py:201
+#, python-format
+msgid "The %(verbose_name)s was deleted."
+msgstr ""
diff --git a/google/appengine/_internal/django/conf/locale/en/LC_MESSAGES/djangojs.mo b/google/appengine/_internal/django/conf/locale/en/LC_MESSAGES/djangojs.mo
new file mode 100644
index 0000000..2eda56d
--- /dev/null
+++ b/google/appengine/_internal/django/conf/locale/en/LC_MESSAGES/djangojs.mo
Binary files differ
diff --git a/google/appengine/_internal/django/conf/locale/en/LC_MESSAGES/djangojs.po b/google/appengine/_internal/django/conf/locale/en/LC_MESSAGES/djangojs.po
new file mode 100644
index 0000000..bbd692a
--- /dev/null
+++ b/google/appengine/_internal/django/conf/locale/en/LC_MESSAGES/djangojs.po
@@ -0,0 +1,145 @@
+# This file is distributed under the same license as the Django package.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: Django\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2010-05-13 15:39+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#: contrib/admin/media/js/SelectFilter2.js:37
+#, perl-format
+msgid "Available %s"
+msgstr ""
+
+#: contrib/admin/media/js/SelectFilter2.js:45
+msgid "Choose all"
+msgstr ""
+
+#: contrib/admin/media/js/SelectFilter2.js:50
+msgid "Add"
+msgstr ""
+
+#: contrib/admin/media/js/SelectFilter2.js:52
+msgid "Remove"
+msgstr ""
+
+#: contrib/admin/media/js/SelectFilter2.js:57
+#, perl-format
+msgid "Chosen %s"
+msgstr ""
+
+#: contrib/admin/media/js/SelectFilter2.js:58
+msgid "Select your choice(s) and click "
+msgstr ""
+
+#: contrib/admin/media/js/SelectFilter2.js:63
+msgid "Clear all"
+msgstr ""
+
+#: contrib/admin/media/js/actions.js:18
+#: contrib/admin/media/js/actions.min.js:1
+msgid "%(sel)s of %(cnt)s selected"
+msgid_plural "%(sel)s of %(cnt)s selected"
+msgstr[0] ""
+msgstr[1] ""
+
+#: contrib/admin/media/js/actions.js:109
+#: contrib/admin/media/js/actions.min.js:5
+msgid ""
+"You have unsaved changes on individual editable fields. If you run an "
+"action, your unsaved changes will be lost."
+msgstr ""
+
+#: contrib/admin/media/js/actions.js:121
+#: contrib/admin/media/js/actions.min.js:6
+msgid ""
+"You have selected an action, but you haven't saved your changes to "
+"individual fields yet. Please click OK to save. You'll need to re-run the "
+"action."
+msgstr ""
+
+#: contrib/admin/media/js/actions.js:123
+#: contrib/admin/media/js/actions.min.js:6
+msgid ""
+"You have selected an action, and you haven't made any changes on individual "
+"fields. You're probably looking for the Go button rather than the Save "
+"button."
+msgstr ""
+
+#: contrib/admin/media/js/calendar.js:24
+#: contrib/admin/media/js/dateparse.js:32
+msgid ""
+"January February March April May June July August September October November "
+"December"
+msgstr ""
+
+#: contrib/admin/media/js/calendar.js:25
+msgid "S M T W T F S"
+msgstr ""
+
+#: contrib/admin/media/js/collapse.js:9 contrib/admin/media/js/collapse.js:21
+#: contrib/admin/media/js/collapse.min.js:1
+msgid "Show"
+msgstr ""
+
+#: contrib/admin/media/js/collapse.js:16
+#: contrib/admin/media/js/collapse.min.js:1
+msgid "Hide"
+msgstr ""
+
+#: contrib/admin/media/js/dateparse.js:33
+msgid "Sunday Monday Tuesday Wednesday Thursday Friday Saturday"
+msgstr ""
+
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:49
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:84
+msgid "Now"
+msgstr ""
+
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:53
+msgid "Clock"
+msgstr ""
+
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:80
+msgid "Choose a time"
+msgstr ""
+
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:85
+msgid "Midnight"
+msgstr ""
+
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:86
+msgid "6 a.m."
+msgstr ""
+
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:87
+msgid "Noon"
+msgstr ""
+
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:91
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:188
+msgid "Cancel"
+msgstr ""
+
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:133
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:182
+msgid "Today"
+msgstr ""
+
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:137
+msgid "Calendar"
+msgstr ""
+
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:180
+msgid "Yesterday"
+msgstr ""
+
+#: contrib/admin/media/js/admin/DateTimeShortcuts.js:184
+msgid "Tomorrow"
+msgstr ""
diff --git a/google/appengine/_internal/django/conf/locale/en/__init__.py b/google/appengine/_internal/django/conf/locale/en/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/google/appengine/_internal/django/conf/locale/en/__init__.py
diff --git a/google/appengine/_internal/django/conf/locale/en/formats.py b/google/appengine/_internal/django/conf/locale/en/formats.py
new file mode 100644
index 0000000..c59a94a
--- /dev/null
+++ b/google/appengine/_internal/django/conf/locale/en/formats.py
@@ -0,0 +1,38 @@
+# -*- encoding: utf-8 -*-
+# This file is distributed under the same license as the Django package.
+#
+
+DATE_FORMAT = 'N j, Y'
+TIME_FORMAT = 'P'
+DATETIME_FORMAT = 'N j, Y, P'
+YEAR_MONTH_FORMAT = 'F Y'
+MONTH_DAY_FORMAT = 'F j'
+SHORT_DATE_FORMAT = 'm/d/Y'
+SHORT_DATETIME_FORMAT = 'm/d/Y P'
+FIRST_DAY_OF_WEEK = 0 # Sunday
+DATE_INPUT_FORMATS = (
+ '%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
+ # '%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
+ # '%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
+ # '%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
+ # '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
+)
+TIME_INPUT_FORMATS = (
+ '%H:%M:%S', # '14:30:59'
+ '%H:%M', # '14:30'
+)
+DATETIME_INPUT_FORMATS = (
+ '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
+ '%Y-%m-%d %H:%M', # '2006-10-25 14:30'
+ '%Y-%m-%d', # '2006-10-25'
+ '%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
+ '%m/%d/%Y %H:%M', # '10/25/2006 14:30'
+ '%m/%d/%Y', # '10/25/2006'
+ '%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
+ '%m/%d/%y %H:%M', # '10/25/06 14:30'
+ '%m/%d/%y', # '10/25/06'
+)
+DECIMAL_SEPARATOR = u'.'
+THOUSAND_SEPARATOR = u','
+NUMBER_GROUPING = 3
+
diff --git a/google/appengine/_internal/django/conf/project_template/__init__.py b/google/appengine/_internal/django/conf/project_template/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/google/appengine/_internal/django/conf/project_template/__init__.py
diff --git a/google/appengine/_internal/django/conf/project_template/manage.py b/google/appengine/_internal/django/conf/project_template/manage.py
new file mode 100644
index 0000000..393449d
--- /dev/null
+++ b/google/appengine/_internal/django/conf/project_template/manage.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+from google.appengine._internal.django.core.management import execute_manager
+try:
+ import settings # Assumed to be in the same directory.
+except ImportError:
+ import sys
+ sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
+ sys.exit(1)
+
+if __name__ == "__main__":
+ execute_manager(settings)
diff --git a/google/appengine/_internal/django/conf/project_template/settings.py b/google/appengine/_internal/django/conf/project_template/settings.py
new file mode 100644
index 0000000..686fadc
--- /dev/null
+++ b/google/appengine/_internal/django/conf/project_template/settings.py
@@ -0,0 +1,96 @@
+# Django settings for {{ project_name }} project.
+
+DEBUG = True
+TEMPLATE_DEBUG = DEBUG
+
+ADMINS = (
+ # ('Your Name', 'your_email@domain.com'),
+)
+
+MANAGERS = ADMINS
+
+DATABASES = {
+ 'default': {
+ 'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
+ 'NAME': '', # Or path to database file if using sqlite3.
+ 'USER': '', # Not used with sqlite3.
+ 'PASSWORD': '', # Not used with sqlite3.
+ 'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
+ 'PORT': '', # Set to empty string for default. Not used with sqlite3.
+ }
+}
+
+# Local time zone for this installation. Choices can be found here:
+# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
+# although not all choices may be available on all operating systems.
+# On Unix systems, a value of None will cause Django to use the same
+# timezone as the operating system.
+# If running in a Windows environment this must be set to the same as your
+# system time zone.
+TIME_ZONE = 'America/Chicago'
+
+# Language code for this installation. All choices can be found here:
+# http://www.i18nguy.com/unicode/language-identifiers.html
+LANGUAGE_CODE = 'en-us'
+
+SITE_ID = 1
+
+# If you set this to False, Django will make some optimizations so as not
+# to load the internationalization machinery.
+USE_I18N = True
+
+# If you set this to False, Django will not format dates, numbers and
+# calendars according to the current locale
+USE_L10N = True
+
+# Absolute filesystem path to the directory that will hold user-uploaded files.
+# Example: "/home/media/media.lawrence.com/"
+MEDIA_ROOT = ''
+
+# URL that handles the media served from MEDIA_ROOT. Make sure to use a
+# trailing slash if there is a path component (optional in other cases).
+# Examples: "http://media.lawrence.com", "http://example.com/media/"
+MEDIA_URL = ''
+
+# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
+# trailing slash.
+# Examples: "http://foo.com/media/", "/media/".
+ADMIN_MEDIA_PREFIX = '/media/'
+
+# Make this unique, and don't share it with anybody.
+SECRET_KEY = ''
+
+# List of callables that know how to import templates from various sources.
+TEMPLATE_LOADERS = (
+ 'django.template.loaders.filesystem.Loader',
+ 'django.template.loaders.app_directories.Loader',
+# 'django.template.loaders.eggs.Loader',
+)
+
+MIDDLEWARE_CLASSES = (
+ 'django.middleware.common.CommonMiddleware',
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.middleware.csrf.CsrfViewMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ 'django.contrib.messages.middleware.MessageMiddleware',
+)
+
+ROOT_URLCONF = '{{ project_name }}.urls'
+
+TEMPLATE_DIRS = (
+ # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
+ # Always use forward slashes, even on Windows.
+ # Don't forget to use absolute paths, not relative paths.
+)
+
+INSTALLED_APPS = (
+ 'django.contrib.auth',
+ 'django.contrib.contenttypes',
+ 'django.contrib.sessions',
+ 'django.contrib.sites',
+ 'django.contrib.messages',
+ # Uncomment the next line to enable the admin:
+ # 'django.contrib.admin',
+ # Uncomment the next line to enable admin documentation:
+ # 'django.contrib.admindocs',
+)
diff --git a/google/appengine/_internal/django/conf/project_template/urls.py b/google/appengine/_internal/django/conf/project_template/urls.py
new file mode 100644
index 0000000..e0cef14
--- /dev/null
+++ b/google/appengine/_internal/django/conf/project_template/urls.py
@@ -0,0 +1,16 @@
+from google.appengine._internal.django.conf.urls.defaults import *
+
+# Uncomment the next two lines to enable the admin:
+# from google.appengine._internal.django.contrib import admin
+# admin.autodiscover()
+
+urlpatterns = patterns('',
+ # Example:
+ # (r'^{{ project_name }}/', include('{{ project_name }}.foo.urls')),
+
+ # Uncomment the admin/doc line below to enable admin documentation:
+ # (r'^admin/doc/', include('django.contrib.admindocs.urls')),
+
+ # Uncomment the next line to enable the admin:
+ # (r'^admin/', include(admin.site.urls)),
+)
diff --git a/google/appengine/_internal/django/conf/urls/__init__.py b/google/appengine/_internal/django/conf/urls/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/google/appengine/_internal/django/conf/urls/__init__.py
diff --git a/google/appengine/_internal/django/conf/urls/defaults.py b/google/appengine/_internal/django/conf/urls/defaults.py
new file mode 100644
index 0000000..8f5cf1e
--- /dev/null
+++ b/google/appengine/_internal/django/conf/urls/defaults.py
@@ -0,0 +1,42 @@
+from google.appengine._internal.django.core.urlresolvers import RegexURLPattern, RegexURLResolver
+from google.appengine._internal.django.core.exceptions import ImproperlyConfigured
+
+__all__ = ['handler404', 'handler500', 'include', 'patterns', 'url']
+
+handler404 = 'django.views.defaults.page_not_found'
+handler500 = 'django.views.defaults.server_error'
+
+def include(arg, namespace=None, app_name=None):
+ if isinstance(arg, tuple):
+ # callable returning a namespace hint
+ if namespace:
+ raise ImproperlyConfigured('Cannot override the namespace for a dynamic module that provides a namespace')
+ urlconf_module, app_name, namespace = arg
+ else:
+ # No namespace hint - use manually provided namespace
+ urlconf_module = arg
+ return (urlconf_module, app_name, namespace)
+
+def patterns(prefix, *args):
+ pattern_list = []
+ for t in args:
+ if isinstance(t, (list, tuple)):
+ t = url(prefix=prefix, *t)
+ elif isinstance(t, RegexURLPattern):
+ t.add_prefix(prefix)
+ pattern_list.append(t)
+ return pattern_list
+
+def url(regex, view, kwargs=None, name=None, prefix=''):
+ if isinstance(view, (list,tuple)):
+ # For include(...) processing.
+ urlconf_module, app_name, namespace = view
+ return RegexURLResolver(regex, urlconf_module, kwargs, app_name=app_name, namespace=namespace)
+ else:
+ if isinstance(view, basestring):
+ if not view:
+ raise ImproperlyConfigured('Empty URL pattern view name not permitted (for pattern %r)' % regex)
+ if prefix:
+ view = prefix + '.' + view
+ return RegexURLPattern(regex, view, kwargs, name)
+
diff --git a/google/appengine/_internal/django/conf/urls/i18n.py b/google/appengine/_internal/django/conf/urls/i18n.py
new file mode 100644
index 0000000..71daf84
--- /dev/null
+++ b/google/appengine/_internal/django/conf/urls/i18n.py
@@ -0,0 +1,5 @@
+from google.appengine._internal.django.conf.urls.defaults import *
+
+urlpatterns = patterns('',
+ (r'^setlang/$', 'django.views.i18n.set_language'),
+)
diff --git a/google/appengine/_internal/django/conf/urls/shortcut.py b/google/appengine/_internal/django/conf/urls/shortcut.py
new file mode 100644
index 0000000..0d8b147
--- /dev/null
+++ b/google/appengine/_internal/django/conf/urls/shortcut.py
@@ -0,0 +1,5 @@
+from google.appengine._internal.django.conf.urls.defaults import *
+
+urlpatterns = patterns('django.views',
+ (r'^(?P<content_type_id>\d+)/(?P<object_id>.*)/$', 'defaults.shortcut'),
+)
diff --git a/google/appengine/_internal/django/core/__init__.py b/google/appengine/_internal/django/core/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/google/appengine/_internal/django/core/__init__.py
diff --git a/google/appengine/_internal/django/core/cache/__init__.py b/google/appengine/_internal/django/core/cache/__init__.py
new file mode 100644
index 0000000..11bd6fd
--- /dev/null
+++ b/google/appengine/_internal/django/core/cache/__init__.py
@@ -0,0 +1,79 @@
+"""
+Caching framework.
+
+This package defines a set of cache backends that all conform to a simple API.
+In a nutshell, a cache is a set of values -- which can be any object that
+may be pickled -- identified by string keys. For the complete API, see
+the abstract BaseCache class in django.core.cache.backends.base.
+
+Client code should not access a cache backend directly; instead it should
+either use the "cache" variable made available here, or it should use the
+get_cache() function made available here. get_cache() takes a backend URI
+(e.g. "memcached://127.0.0.1:11211/") and returns an instance of a backend
+cache class.
+
+See docs/cache.txt for information on the public API.
+"""
+
+try:
+ from urlparse import parse_qsl
+except ImportError:
+ from cgi import parse_qsl
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core import signals
+from google.appengine._internal.django.core.cache.backends.base import InvalidCacheBackendError, CacheKeyWarning
+from google.appengine._internal.django.utils import importlib
+
+# Name for use in settings file --> name of module in "backends" directory.
+# Any backend scheme that is not in this dictionary is treated as a Python
+# import path to a custom backend.
+BACKENDS = {
+ 'memcached': 'memcached',
+ 'locmem': 'locmem',
+ 'file': 'filebased',
+ 'db': 'db',
+ 'dummy': 'dummy',
+}
+
+def parse_backend_uri(backend_uri):
+ """
+ Converts the "backend_uri" into a cache scheme ('db', 'memcached', etc), a
+ host and any extra params that are required for the backend. Returns a
+ (scheme, host, params) tuple.
+ """
+ if backend_uri.find(':') == -1:
+ raise InvalidCacheBackendError("Backend URI must start with scheme://")
+ scheme, rest = backend_uri.split(':', 1)
+ if not rest.startswith('//'):
+ raise InvalidCacheBackendError("Backend URI must start with scheme://")
+
+ host = rest[2:]
+ qpos = rest.find('?')
+ if qpos != -1:
+ params = dict(parse_qsl(rest[qpos+1:]))
+ host = rest[2:qpos]
+ else:
+ params = {}
+ if host.endswith('/'):
+ host = host[:-1]
+
+ return scheme, host, params
+
+def get_cache(backend_uri):
+ scheme, host, params = parse_backend_uri(backend_uri)
+ if scheme in BACKENDS:
+ name = 'django.core.cache.backends.%s' % BACKENDS[scheme]
+ else:
+ name = scheme
+ module = importlib.import_module(name)
+ return getattr(module, 'CacheClass')(host, params)
+
+cache = get_cache(settings.CACHE_BACKEND)
+
+# Some caches -- python-memcached in particular -- need to do a cleanup at the
+# end of a request cycle. If the cache provides a close() method, wire it up
+# here.
+if hasattr(cache, 'close'):
+ signals.request_finished.connect(cache.close)
+
diff --git a/google/appengine/_internal/django/core/cache/backends/__init__.py b/google/appengine/_internal/django/core/cache/backends/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/google/appengine/_internal/django/core/cache/backends/__init__.py
diff --git a/google/appengine/_internal/django/core/cache/backends/base.py b/google/appengine/_internal/django/core/cache/backends/base.py
new file mode 100644
index 0000000..3e50a3e
--- /dev/null
+++ b/google/appengine/_internal/django/core/cache/backends/base.py
@@ -0,0 +1,144 @@
+"Base Cache class."
+
+import warnings
+
+from google.appengine._internal.django.core.exceptions import ImproperlyConfigured, DjangoRuntimeWarning
+
+class InvalidCacheBackendError(ImproperlyConfigured):
+ pass
+
+class CacheKeyWarning(DjangoRuntimeWarning):
+ pass
+
+# Memcached does not accept keys longer than this.
+MEMCACHE_MAX_KEY_LENGTH = 250
+
+class BaseCache(object):
+ def __init__(self, params):
+ timeout = params.get('timeout', 300)
+ try:
+ timeout = int(timeout)
+ except (ValueError, TypeError):
+ timeout = 300
+ self.default_timeout = timeout
+
+ def add(self, key, value, timeout=None):
+ """
+ Set a value in the cache if the key does not already exist. If
+ timeout is given, that timeout will be used for the key; otherwise
+ the default cache timeout will be used.
+
+ Returns True if the value was stored, False otherwise.
+ """
+ raise NotImplementedError
+
+ def get(self, key, default=None):
+ """
+ Fetch a given key from the cache. If the key does not exist, return
+ default, which itself defaults to None.
+ """
+ raise NotImplementedError
+
+ def set(self, key, value, timeout=None):
+ """
+ Set a value in the cache. If timeout is given, that timeout will be
+ used for the key; otherwise the default cache timeout will be used.
+ """
+ raise NotImplementedError
+
+ def delete(self, key):
+ """
+ Delete a key from the cache, failing silently.
+ """
+ raise NotImplementedError
+
+ def get_many(self, keys):
+ """
+ Fetch a bunch of keys from the cache. For certain backends (memcached,
+ pgsql) this can be *much* faster when fetching multiple values.
+
+ Returns a dict mapping each key in keys to its value. If the given
+ key is missing, it will be missing from the response dict.
+ """
+ d = {}
+ for k in keys:
+ val = self.get(k)
+ if val is not None:
+ d[k] = val
+ return d
+
+ def has_key(self, key):
+ """
+ Returns True if the key is in the cache and has not expired.
+ """
+ return self.get(key) is not None
+
+ def incr(self, key, delta=1):
+ """
+ Add delta to value in the cache. If the key does not exist, raise a
+ ValueError exception.
+ """
+ if key not in self:
+ raise ValueError("Key '%s' not found" % key)
+ new_value = self.get(key) + delta
+ self.set(key, new_value)
+ return new_value
+
+ def decr(self, key, delta=1):
+ """
+ Subtract delta from value in the cache. If the key does not exist, raise
+ a ValueError exception.
+ """
+ return self.incr(key, -delta)
+
+ def __contains__(self, key):
+ """
+ Returns True if the key is in the cache and has not expired.
+ """
+ # This is a separate method, rather than just a copy of has_key(),
+ # so that it always has the same functionality as has_key(), even
+ # if a subclass overrides it.
+ return self.has_key(key)
+
+ def set_many(self, data, timeout=None):
+ """
+ Set a bunch of values in the cache at once from a dict of key/value
+ pairs. For certain backends (memcached), this is much more efficient
+ than calling set() multiple times.
+
+ If timeout is given, that timeout will be used for the key; otherwise
+ the default cache timeout will be used.
+ """
+ for key, value in data.items():
+ self.set(key, value, timeout)
+
+ def delete_many(self, keys):
+ """
+ Set a bunch of values in the cache at once. For certain backends
+ (memcached), this is much more efficient than calling delete() multiple
+ times.
+ """
+ for key in keys:
+ self.delete(key)
+
+ def clear(self):
+ """Remove *all* values from the cache at once."""
+ raise NotImplementedError
+
+ def validate_key(self, key):
+ """
+ Warn about keys that would not be portable to the memcached
+ backend. This encourages (but does not force) writing backend-portable
+ cache code.
+
+ """
+ if len(key) > MEMCACHE_MAX_KEY_LENGTH:
+ warnings.warn('Cache key will cause errors if used with memcached: '
+ '%s (longer than %s)' % (key, MEMCACHE_MAX_KEY_LENGTH),
+ CacheKeyWarning)
+ for char in key:
+ if ord(char) < 33 or ord(char) == 127:
+ warnings.warn('Cache key contains characters that will cause '
+ 'errors if used with memcached: %r' % key,
+ CacheKeyWarning)
+
diff --git a/google/appengine/_internal/django/core/cache/backends/db.py b/google/appengine/_internal/django/core/cache/backends/db.py
new file mode 100644
index 0000000..2718ff2
--- /dev/null
+++ b/google/appengine/_internal/django/core/cache/backends/db.py
@@ -0,0 +1,145 @@
+"Database cache backend."
+
+from google.appengine._internal.django.core.cache.backends.base import BaseCache
+from google.appengine._internal.django.db import connections, router, transaction, DatabaseError
+import base64, time
+from datetime import datetime
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+class Options(object):
+ """A class that will quack like a Django model _meta class.
+
+ This allows cache operations to be controlled by the router
+ """
+ def __init__(self, table):
+ self.db_table = table
+ self.app_label = 'django_cache'
+ self.module_name = 'cacheentry'
+ self.verbose_name = 'cache entry'
+ self.verbose_name_plural = 'cache entries'
+ self.object_name = 'CacheEntry'
+ self.abstract = False
+ self.managed = True
+ self.proxy = False
+
+class CacheClass(BaseCache):
+ def __init__(self, table, params):
+ BaseCache.__init__(self, params)
+ self._table = table
+
+ class CacheEntry(object):
+ _meta = Options(table)
+ self.cache_model_class = CacheEntry
+
+ max_entries = params.get('max_entries', 300)
+ try:
+ self._max_entries = int(max_entries)
+ except (ValueError, TypeError):
+ self._max_entries = 300
+ cull_frequency = params.get('cull_frequency', 3)
+ try:
+ self._cull_frequency = int(cull_frequency)
+ except (ValueError, TypeError):
+ self._cull_frequency = 3
+
+ def get(self, key, default=None):
+ self.validate_key(key)
+ db = router.db_for_read(self.cache_model_class)
+ table = connections[db].ops.quote_name(self._table)
+ cursor = connections[db].cursor()
+
+ cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % table, [key])
+ row = cursor.fetchone()
+ if row is None:
+ return default
+ now = datetime.now()
+ if row[2] < now:
+ db = router.db_for_write(self.cache_model_class)
+ cursor = connections[db].cursor()
+ cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])
+ transaction.commit_unless_managed(using=db)
+ return default
+ value = connections[db].ops.process_clob(row[1])
+ return pickle.loads(base64.decodestring(value))
+
+ def set(self, key, value, timeout=None):
+ self.validate_key(key)
+ self._base_set('set', key, value, timeout)
+
+ def add(self, key, value, timeout=None):
+ self.validate_key(key)
+ return self._base_set('add', key, value, timeout)
+
+ def _base_set(self, mode, key, value, timeout=None):
+ if timeout is None:
+ timeout = self.default_timeout
+ db = router.db_for_write(self.cache_model_class)
+ table = connections[db].ops.quote_name(self._table)
+ cursor = connections[db].cursor()
+
+ cursor.execute("SELECT COUNT(*) FROM %s" % table)
+ num = cursor.fetchone()[0]
+ now = datetime.now().replace(microsecond=0)
+ exp = datetime.fromtimestamp(time.time() + timeout).replace(microsecond=0)
+ if num > self._max_entries:
+ self._cull(db, cursor, now)
+ encoded = base64.encodestring(pickle.dumps(value, 2)).strip()
+ cursor.execute("SELECT cache_key, expires FROM %s WHERE cache_key = %%s" % table, [key])
+ try:
+ result = cursor.fetchone()
+ if result and (mode == 'set' or
+ (mode == 'add' and result[1] < now)):
+ cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % table,
+ [encoded, connections[db].ops.value_to_db_datetime(exp), key])
+ else:
+ cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % table,
+ [key, encoded, connections[db].ops.value_to_db_datetime(exp)])
+ except DatabaseError:
+ # To be threadsafe, updates/inserts are allowed to fail silently
+ transaction.rollback_unless_managed(using=db)
+ return False
+ else:
+ transaction.commit_unless_managed(using=db)
+ return True
+
+ def delete(self, key):
+ self.validate_key(key)
+ db = router.db_for_write(self.cache_model_class)
+ table = connections[db].ops.quote_name(self._table)
+ cursor = connections[db].cursor()
+
+ cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])
+ transaction.commit_unless_managed(using=db)
+
+ def has_key(self, key):
+ self.validate_key(key)
+ db = router.db_for_read(self.cache_model_class)
+ table = connections[db].ops.quote_name(self._table)
+ cursor = connections[db].cursor()
+
+ now = datetime.now().replace(microsecond=0)
+ cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s and expires > %%s" % table,
+ [key, connections[db].ops.value_to_db_datetime(now)])
+ return cursor.fetchone() is not None
+
+ def _cull(self, db, cursor, now):
+ if self._cull_frequency == 0:
+ self.clear()
+ else:
+ table = connections[db].ops.quote_name(self._table)
+ cursor.execute("DELETE FROM %s WHERE expires < %%s" % table,
+ [connections[db].ops.value_to_db_datetime(now)])
+ cursor.execute("SELECT COUNT(*) FROM %s" % table)
+ num = cursor.fetchone()[0]
+ if num > self._max_entries:
+ cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % table, [num / self._cull_frequency])
+ cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % table, [cursor.fetchone()[0]])
+
+ def clear(self):
+ db = router.db_for_write(self.cache_model_class)
+ table = connections[db].ops.quote_name(self._table)
+ cursor = connections[db].cursor()
+ cursor.execute('DELETE FROM %s' % table)
diff --git a/google/appengine/_internal/django/core/cache/backends/dummy.py b/google/appengine/_internal/django/core/cache/backends/dummy.py
new file mode 100644
index 0000000..6d4cd3a
--- /dev/null
+++ b/google/appengine/_internal/django/core/cache/backends/dummy.py
@@ -0,0 +1,37 @@
+"Dummy cache backend"
+
+from google.appengine._internal.django.core.cache.backends.base import BaseCache
+
+class CacheClass(BaseCache):
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def add(self, key, *args, **kwargs):
+ self.validate_key(key)
+ return True
+
+ def get(self, key, default=None):
+ self.validate_key(key)
+ return default
+
+ def set(self, key, *args, **kwargs):
+ self.validate_key(key)
+
+ def delete(self, key, *args, **kwargs):
+ self.validate_key(key)
+
+ def get_many(self, *args, **kwargs):
+ return {}
+
+ def has_key(self, key, *args, **kwargs):
+ self.validate_key(key)
+ return False
+
+ def set_many(self, *args, **kwargs):
+ pass
+
+ def delete_many(self, *args, **kwargs):
+ pass
+
+ def clear(self):
+ pass
diff --git a/google/appengine/_internal/django/core/cache/backends/filebased.py b/google/appengine/_internal/django/core/cache/backends/filebased.py
new file mode 100644
index 0000000..d9ebdec
--- /dev/null
+++ b/google/appengine/_internal/django/core/cache/backends/filebased.py
@@ -0,0 +1,171 @@
+"File-based cache backend"
+
+import os
+import time
+import shutil
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+from google.appengine._internal.django.core.cache.backends.base import BaseCache
+from google.appengine._internal.django.utils.hashcompat import md5_constructor
+
+class CacheClass(BaseCache):
+ def __init__(self, dir, params):
+ BaseCache.__init__(self, params)
+
+ max_entries = params.get('max_entries', 300)
+ try:
+ self._max_entries = int(max_entries)
+ except (ValueError, TypeError):
+ self._max_entries = 300
+
+ cull_frequency = params.get('cull_frequency', 3)
+ try:
+ self._cull_frequency = int(cull_frequency)
+ except (ValueError, TypeError):
+ self._cull_frequency = 3
+
+ self._dir = dir
+ if not os.path.exists(self._dir):
+ self._createdir()
+
+ def add(self, key, value, timeout=None):
+ self.validate_key(key)
+ if self.has_key(key):
+ return False
+
+ self.set(key, value, timeout)
+ return True
+
+ def get(self, key, default=None):
+ self.validate_key(key)
+ fname = self._key_to_file(key)
+ try:
+ f = open(fname, 'rb')
+ try:
+ exp = pickle.load(f)
+ now = time.time()
+ if exp < now:
+ self._delete(fname)
+ else:
+ return pickle.load(f)
+ finally:
+ f.close()
+ except (IOError, OSError, EOFError, pickle.PickleError):
+ pass
+ return default
+
+ def set(self, key, value, timeout=None):
+ self.validate_key(key)
+ fname = self._key_to_file(key)
+ dirname = os.path.dirname(fname)
+
+ if timeout is None:
+ timeout = self.default_timeout
+
+ self._cull()
+
+ try:
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+
+ f = open(fname, 'wb')
+ try:
+ now = time.time()
+ pickle.dump(now + timeout, f, pickle.HIGHEST_PROTOCOL)
+ pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
+ finally:
+ f.close()
+ except (IOError, OSError):
+ pass
+
+ def delete(self, key):
+ self.validate_key(key)
+ try:
+ self._delete(self._key_to_file(key))
+ except (IOError, OSError):
+ pass
+
+ def _delete(self, fname):
+ os.remove(fname)
+ try:
+ # Remove the 2 subdirs if they're empty
+ dirname = os.path.dirname(fname)
+ os.rmdir(dirname)
+ os.rmdir(os.path.dirname(dirname))
+ except (IOError, OSError):
+ pass
+
+ def has_key(self, key):
+ self.validate_key(key)
+ fname = self._key_to_file(key)
+ try:
+ f = open(fname, 'rb')
+ try:
+ exp = pickle.load(f)
+ now = time.time()
+ if exp < now:
+ self._delete(fname)
+ return False
+ else:
+ return True
+ finally:
+ f.close()
+ except (IOError, OSError, EOFError, pickle.PickleError):
+ return False
+
+ def _cull(self):
+ if int(self._num_entries) < self._max_entries:
+ return
+
+ try:
+ filelist = sorted(os.listdir(self._dir))
+ except (IOError, OSError):
+ return
+
+ if self._cull_frequency == 0:
+ doomed = filelist
+ else:
+ doomed = [os.path.join(self._dir, k) for (i, k) in enumerate(filelist) if i % self._cull_frequency == 0]
+
+ for topdir in doomed:
+ try:
+ for root, _, files in os.walk(topdir):
+ for f in files:
+ self._delete(os.path.join(root, f))
+ except (IOError, OSError):
+ pass
+
+ def _createdir(self):
+ try:
+ os.makedirs(self._dir)
+ except OSError:
+ raise EnvironmentError("Cache directory '%s' does not exist and could not be created'" % self._dir)
+
+ def _key_to_file(self, key):
+ """
+ Convert the filename into an md5 string. We'll turn the first couple
+ bits of the path into directory prefixes to be nice to filesystems
+ that have problems with large numbers of files in a directory.
+
+ Thus, a cache key of "foo" gets turned into a file named
+ ``{cache-dir}ac/bd/18db4cc2f85cedef654fccc4a4d8``.
+ """
+ path = md5_constructor(key.encode('utf-8')).hexdigest()
+ path = os.path.join(path[:2], path[2:4], path[4:])
+ return os.path.join(self._dir, path)
+
+ def _get_num_entries(self):
+ count = 0
+ for _,_,files in os.walk(self._dir):
+ count += len(files)
+ return count
+ _num_entries = property(_get_num_entries)
+
+ def clear(self):
+ try:
+ shutil.rmtree(self._dir)
+ except (IOError, OSError):
+ pass
diff --git a/google/appengine/_internal/django/core/cache/backends/locmem.py b/google/appengine/_internal/django/core/cache/backends/locmem.py
new file mode 100644
index 0000000..d6e621f
--- /dev/null
+++ b/google/appengine/_internal/django/core/cache/backends/locmem.py
@@ -0,0 +1,143 @@
+"Thread-safe in-memory cache backend."
+
+import time
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+from google.appengine._internal.django.core.cache.backends.base import BaseCache
+from google.appengine._internal.django.utils.synch import RWLock
+
+class CacheClass(BaseCache):
+ def __init__(self, _, params):
+ BaseCache.__init__(self, params)
+ self._cache = {}
+ self._expire_info = {}
+
+ max_entries = params.get('max_entries', 300)
+ try:
+ self._max_entries = int(max_entries)
+ except (ValueError, TypeError):
+ self._max_entries = 300
+
+ cull_frequency = params.get('cull_frequency', 3)
+ try:
+ self._cull_frequency = int(cull_frequency)
+ except (ValueError, TypeError):
+ self._cull_frequency = 3
+
+ self._lock = RWLock()
+
+ def add(self, key, value, timeout=None):
+ self.validate_key(key)
+ self._lock.writer_enters()
+ try:
+ exp = self._expire_info.get(key)
+ if exp is None or exp <= time.time():
+ try:
+ self._set(key, pickle.dumps(value), timeout)
+ return True
+ except pickle.PickleError:
+ pass
+ return False
+ finally:
+ self._lock.writer_leaves()
+
+ def get(self, key, default=None):
+ self.validate_key(key)
+ self._lock.reader_enters()
+ try:
+ exp = self._expire_info.get(key)
+ if exp is None:
+ return default
+ elif exp > time.time():
+ try:
+ return pickle.loads(self._cache[key])
+ except pickle.PickleError:
+ return default
+ finally:
+ self._lock.reader_leaves()
+ self._lock.writer_enters()
+ try:
+ try:
+ del self._cache[key]
+ del self._expire_info[key]
+ except KeyError:
+ pass
+ return default
+ finally:
+ self._lock.writer_leaves()
+
+ def _set(self, key, value, timeout=None):
+ if len(self._cache) >= self._max_entries:
+ self._cull()
+ if timeout is None:
+ timeout = self.default_timeout
+ self._cache[key] = value
+ self._expire_info[key] = time.time() + timeout
+
+ def set(self, key, value, timeout=None):
+ self.validate_key(key)
+ self._lock.writer_enters()
+ # Python 2.4 doesn't allow combined try-except-finally blocks.
+ try:
+ try:
+ self._set(key, pickle.dumps(value), timeout)
+ except pickle.PickleError:
+ pass
+ finally:
+ self._lock.writer_leaves()
+
+ def has_key(self, key):
+ self.validate_key(key)
+ self._lock.reader_enters()
+ try:
+ exp = self._expire_info.get(key)
+ if exp is None:
+ return False
+ elif exp > time.time():
+ return True
+ finally:
+ self._lock.reader_leaves()
+
+ self._lock.writer_enters()
+ try:
+ try:
+ del self._cache[key]
+ del self._expire_info[key]
+ except KeyError:
+ pass
+ return False
+ finally:
+ self._lock.writer_leaves()
+
+ def _cull(self):
+ if self._cull_frequency == 0:
+ self.clear()
+ else:
+ doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0]
+ for k in doomed:
+ self._delete(k)
+
+ def _delete(self, key):
+ try:
+ del self._cache[key]
+ except KeyError:
+ pass
+ try:
+ del self._expire_info[key]
+ except KeyError:
+ pass
+
+ def delete(self, key):
+ self.validate_key(key)
+ self._lock.writer_enters()
+ try:
+ self._delete(key)
+ finally:
+ self._lock.writer_leaves()
+
+ def clear(self):
+ self._cache.clear()
+ self._expire_info.clear()
diff --git a/google/appengine/_internal/django/core/cache/backends/memcached.py b/google/appengine/_internal/django/core/cache/backends/memcached.py
new file mode 100644
index 0000000..cf906cc
--- /dev/null
+++ b/google/appengine/_internal/django/core/cache/backends/memcached.py
@@ -0,0 +1,104 @@
+"Memcached cache backend"
+
+import time
+
+from google.appengine._internal.django.core.cache.backends.base import BaseCache, InvalidCacheBackendError
+from google.appengine._internal.django.utils.encoding import smart_unicode, smart_str
+
+try:
+ import cmemcache as memcache
+ import warnings
+ warnings.warn(
+ "Support for the 'cmemcache' library has been deprecated. Please use python-memcached instead.",
+ PendingDeprecationWarning
+ )
+except ImportError:
+ try:
+ import memcache
+ except:
+ raise InvalidCacheBackendError("Memcached cache backend requires either the 'memcache' or 'cmemcache' library")
+
+class CacheClass(BaseCache):
+ def __init__(self, server, params):
+ BaseCache.__init__(self, params)
+ self._cache = memcache.Client(server.split(';'))
+
+ def _get_memcache_timeout(self, timeout):
+ """
+ Memcached deals with long (> 30 days) timeouts in a special
+ way. Call this function to obtain a safe value for your timeout.
+ """
+ timeout = timeout or self.default_timeout
+ if timeout > 2592000: # 60*60*24*30, 30 days
+ # See http://code.google.com/p/memcached/wiki/FAQ
+ # "You can set expire times up to 30 days in the future. After that
+ # memcached interprets it as a date, and will expire the item after
+ # said date. This is a simple (but obscure) mechanic."
+ #
+ # This means that we have to switch to absolute timestamps.
+ timeout += int(time.time())
+ return timeout
+
+ def add(self, key, value, timeout=0):
+ if isinstance(value, unicode):
+ value = value.encode('utf-8')
+ return self._cache.add(smart_str(key), value, self._get_memcache_timeout(timeout))
+
+ def get(self, key, default=None):
+ val = self._cache.get(smart_str(key))
+ if val is None:
+ return default
+ return val
+
+ def set(self, key, value, timeout=0):
+ self._cache.set(smart_str(key), value, self._get_memcache_timeout(timeout))
+
+ def delete(self, key):
+ self._cache.delete(smart_str(key))
+
+ def get_many(self, keys):
+ return self._cache.get_multi(map(smart_str,keys))
+
+ def close(self, **kwargs):
+ self._cache.disconnect_all()
+
+ def incr(self, key, delta=1):
+ try:
+ val = self._cache.incr(key, delta)
+
+ # python-memcache responds to incr on non-existent keys by
+ # raising a ValueError. Cmemcache returns None. In both
+ # cases, we should raise a ValueError though.
+ except ValueError:
+ val = None
+ if val is None:
+ raise ValueError("Key '%s' not found" % key)
+
+ return val
+
+ def decr(self, key, delta=1):
+ try:
+ val = self._cache.decr(key, delta)
+
+ # python-memcache responds to decr on non-existent keys by
+ # raising a ValueError. Cmemcache returns None. In both
+ # cases, we should raise a ValueError though.
+ except ValueError:
+ val = None
+ if val is None:
+ raise ValueError("Key '%s' not found" % key)
+ return val
+
+ def set_many(self, data, timeout=0):
+ safe_data = {}
+ for key, value in data.items():
+ if isinstance(value, unicode):
+ value = value.encode('utf-8')
+ safe_data[smart_str(key)] = value
+ self._cache.set_multi(safe_data, self._get_memcache_timeout(timeout))
+
+ def delete_many(self, keys):
+ self._cache.delete_multi(map(smart_str, keys))
+
+ def clear(self):
+ self._cache.flush_all()
diff --git a/google/appengine/_internal/django/core/context_processors.py b/google/appengine/_internal/django/core/context_processors.py
new file mode 100644
index 0000000..8dccea2
--- /dev/null
+++ b/google/appengine/_internal/django/core/context_processors.py
@@ -0,0 +1,102 @@
+"""
+A set of request processors that return dictionaries to be merged into a
+template context. Each function takes the request object as its only parameter
+and returns a dictionary to add to the context.
+
+These are referenced from the setting TEMPLATE_CONTEXT_PROCESSORS and used by
+RequestContext.
+"""
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.middleware.csrf import get_token
+from google.appengine._internal.django.utils.functional import lazy
+
+def auth(request):
+ """
+ DEPRECATED. This context processor is the old location, and has been moved
+ to `django.contrib.auth.context_processors`.
+
+ This function still exists for backwards-compatibility; it will be removed
+ in Django 1.4.
+ """
+ import warnings
+ warnings.warn(
+ "The context processor at `django.core.context_processors.auth` is " "deprecated; use the path `django.contrib.auth.context_processors.auth` " "instead.",
+ PendingDeprecationWarning
+ )
+ from google.appengine._internal.django.contrib.auth.context_processors import auth as auth_context_processor
+ return auth_context_processor(request)
+
+def csrf(request):
+ """
+ Context processor that provides a CSRF token, or the string 'NOTPROVIDED' if
+ it has not been provided by either a view decorator or the middleware
+ """
+ def _get_val():
+ token = get_token(request)
+ if token is None:
+ # In order to be able to provide debugging info in the
+ # case of misconfiguration, we use a sentinel value
+ # instead of returning an empty dict.
+ return 'NOTPROVIDED'
+ else:
+ return token
+ _get_val = lazy(_get_val, str)
+
+ return {'csrf_token': _get_val() }
+
+def debug(request):
+ "Returns context variables helpful for debugging."
+ context_extras = {}
+ if settings.DEBUG and request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS:
+ context_extras['debug'] = True
+ from google.appengine._internal.django.db import connection
+ context_extras['sql_queries'] = connection.queries
+ return context_extras
+
+def i18n(request):
+ from google.appengine._internal.django.utils import translation
+
+ context_extras = {}
+ context_extras['LANGUAGES'] = settings.LANGUAGES
+ context_extras['LANGUAGE_CODE'] = translation.get_language()
+ context_extras['LANGUAGE_BIDI'] = translation.get_language_bidi()
+
+ return context_extras
+
+def media(request):
+ """
+ Adds media-related context variables to the context.
+
+ """
+ return {'MEDIA_URL': settings.MEDIA_URL}
+
+def request(request):
+ return {'request': request}
+
+# PermWrapper and PermLookupDict proxy the permissions system into objects that
+# the template system can understand.
+
+class PermLookupDict(object):
+ def __init__(self, user, module_name):
+ self.user, self.module_name = user, module_name
+
+ def __repr__(self):
+ return str(self.user.get_all_permissions())
+
+ def __getitem__(self, perm_name):
+ return self.user.has_perm("%s.%s" % (self.module_name, perm_name))
+
+ def __nonzero__(self):
+ return self.user.has_module_perms(self.module_name)
+
+class PermWrapper(object):
+ def __init__(self, user):
+ self.user = user
+
+ def __getitem__(self, module_name):
+ return PermLookupDict(self.user, module_name)
+
+ def __iter__(self):
+ # I am large, I contain multitudes.
+ raise TypeError("PermWrapper is not iterable.")
diff --git a/google/appengine/_internal/django/core/exceptions.py b/google/appengine/_internal/django/core/exceptions.py
new file mode 100644
index 0000000..f08b19d
--- /dev/null
+++ b/google/appengine/_internal/django/core/exceptions.py
@@ -0,0 +1,87 @@
+"""
+Global Django exception and warning classes.
+"""
+
+class DjangoRuntimeWarning(RuntimeWarning):
+ pass
+
+class ObjectDoesNotExist(Exception):
+ "The requested object does not exist"
+ silent_variable_failure = True
+
+class MultipleObjectsReturned(Exception):
+ "The query returned multiple objects when only one was expected."
+ pass
+
+class SuspiciousOperation(Exception):
+ "The user did something suspicious"
+ pass
+
+class PermissionDenied(Exception):
+ "The user did not have permission to do that"
+ pass
+
+class ViewDoesNotExist(Exception):
+ "The requested view does not exist"
+ pass
+
+class MiddlewareNotUsed(Exception):
+ "This middleware is not used in this server configuration"
+ pass
+
+class ImproperlyConfigured(Exception):
+ "Django is somehow improperly configured"
+ pass
+
+class FieldError(Exception):
+ """Some kind of problem with a model field."""
+ pass
+
+NON_FIELD_ERRORS = '__all__'
+class ValidationError(Exception):
+ """An error while validating data."""
+ def __init__(self, message, code=None, params=None):
+ import operator
+ from google.appengine._internal.django.utils.encoding import force_unicode
+ """
+ ValidationError can be passed any object that can be printed (usually
+ a string), a list of objects or a dictionary.
+ """
+ if isinstance(message, dict):
+ self.message_dict = message
+ # Reduce each list of messages into a single list.
+ message = reduce(operator.add, message.values())
+
+ if isinstance(message, list):
+ self.messages = [force_unicode(msg) for msg in message]
+ else:
+ self.code = code
+ self.params = params
+ message = force_unicode(message)
+ self.messages = [message]
+
+ def __str__(self):
+ # This is needed because, without a __str__(), printing an exception
+ # instance would result in this:
+ # AttributeError: ValidationError instance has no attribute 'args'
+ # See http://www.python.org/doc/current/tut/node10.html#handling
+ if hasattr(self, 'message_dict'):
+ return repr(self.message_dict)
+ return repr(self.messages)
+
+ def __repr__(self):
+ if hasattr(self, 'message_dict'):
+ return 'ValidationError(%s)' % repr(self.message_dict)
+ return 'ValidationError(%s)' % repr(self.messages)
+
+ def update_error_dict(self, error_dict):
+ if hasattr(self, 'message_dict'):
+ if error_dict:
+ for k, v in self.message_dict.items():
+ error_dict.setdefault(k, []).extend(v)
+ else:
+ error_dict = self.message_dict
+ else:
+ error_dict[NON_FIELD_ERRORS] = self.messages
+ return error_dict
+
diff --git a/google/appengine/_internal/django/core/files/__init__.py b/google/appengine/_internal/django/core/files/__init__.py
new file mode 100644
index 0000000..8c0e840
--- /dev/null
+++ b/google/appengine/_internal/django/core/files/__init__.py
@@ -0,0 +1 @@
+from google.appengine._internal.django.core.files.base import File
diff --git a/google/appengine/_internal/django/core/files/base.py b/google/appengine/_internal/django/core/files/base.py
new file mode 100644
index 0000000..550b20b
--- /dev/null
+++ b/google/appengine/_internal/django/core/files/base.py
@@ -0,0 +1,134 @@
+import os
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+from google.appengine._internal.django.utils.encoding import smart_str, smart_unicode
+from google.appengine._internal.django.core.files.utils import FileProxyMixin
+
+class File(FileProxyMixin):
+ DEFAULT_CHUNK_SIZE = 64 * 2**10
+
+ def __init__(self, file, name=None):
+ self.file = file
+ if name is None:
+ name = getattr(file, 'name', None)
+ self.name = name
+ self.mode = getattr(file, 'mode', None)
+
+ def __str__(self):
+ return smart_str(self.name or '')
+
+ def __unicode__(self):
+ return smart_unicode(self.name or u'')
+
+ def __repr__(self):
+ return "<%s: %s>" % (self.__class__.__name__, self or "None")
+
+ def __nonzero__(self):
+ return bool(self.name)
+
+ def __len__(self):
+ return self.size
+
+ def _get_size(self):
+ if not hasattr(self, '_size'):
+ if hasattr(self.file, 'size'):
+ self._size = self.file.size
+ elif os.path.exists(self.file.name):
+ self._size = os.path.getsize(self.file.name)
+ else:
+ raise AttributeError("Unable to determine the file's size.")
+ return self._size
+
+ def _set_size(self, size):
+ self._size = size
+
+ size = property(_get_size, _set_size)
+
+ def _get_closed(self):
+ return not self.file or self.file.closed
+ closed = property(_get_closed)
+
+ def chunks(self, chunk_size=None):
+ """
+ Read the file and yield chunks of ``chunk_size`` bytes (defaults to
+ ``UploadedFile.DEFAULT_CHUNK_SIZE``).
+ """
+ if not chunk_size:
+ chunk_size = self.DEFAULT_CHUNK_SIZE
+
+ if hasattr(self, 'seek'):
+ self.seek(0)
+ # Assume the pointer is at zero...
+ counter = self.size
+
+ while counter > 0:
+ yield self.read(chunk_size)
+ counter -= chunk_size
+
+ def multiple_chunks(self, chunk_size=None):
+ """
+ Returns ``True`` if you can expect multiple chunks.
+
+ NB: If a particular file representation is in memory, subclasses should
+ always return ``False`` -- there's no good reason to read from memory in
+ chunks.
+ """
+ if not chunk_size:
+ chunk_size = self.DEFAULT_CHUNK_SIZE
+ return self.size > chunk_size
+
+ def __iter__(self):
+ # Iterate over this file-like object by newlines
+ buffer_ = None
+ for chunk in self.chunks():
+ chunk_buffer = StringIO(chunk)
+
+ for line in chunk_buffer:
+ if buffer_:
+ line = buffer_ + line
+ buffer_ = None
+
+ # If this is the end of a line, yield
+ # otherwise, wait for the next round
+ if line[-1] in ('\n', '\r'):
+ yield line
+ else:
+ buffer_ = line
+
+ if buffer_ is not None:
+ yield buffer_
+
+ def open(self, mode=None):
+ if not self.closed:
+ self.seek(0)
+ elif self.name and os.path.exists(self.name):
+ self.file = open(self.name, mode or self.mode)
+ else:
+ raise ValueError("The file cannot be reopened.")
+
+ def close(self):
+ self.file.close()
+
+class ContentFile(File):
+ """
+ A File-like object that takes just raw content, rather than an actual file.
+ """
+ def __init__(self, content):
+ content = content or ''
+ super(ContentFile, self).__init__(StringIO(content))
+ self.size = len(content)
+
+ def __str__(self):
+ return 'Raw content'
+
+ def __nonzero__(self):
+ return True
+
+ def open(self, mode=None):
+ self.seek(0)
+
+ def close(self):
+ pass
diff --git a/google/appengine/_internal/django/core/files/images.py b/google/appengine/_internal/django/core/files/images.py
new file mode 100644
index 0000000..2d2a0e8
--- /dev/null
+++ b/google/appengine/_internal/django/core/files/images.py
@@ -0,0 +1,62 @@
+"""
+Utility functions for handling images.
+
+Requires PIL, as you might imagine.
+"""
+
+from google.appengine._internal.django.core.files import File
+
+class ImageFile(File):
+ """
+ A mixin for use alongside django.core.files.base.File, which provides
+ additional features for dealing with images.
+ """
+ def _get_width(self):
+ return self._get_image_dimensions()[0]
+ width = property(_get_width)
+
+ def _get_height(self):
+ return self._get_image_dimensions()[1]
+ height = property(_get_height)
+
+ def _get_image_dimensions(self):
+ if not hasattr(self, '_dimensions_cache'):
+ close = self.closed
+ self.open()
+ self._dimensions_cache = get_image_dimensions(self, close=close)
+ return self._dimensions_cache
+
+def get_image_dimensions(file_or_path, close=False):
+ """
+ Returns the (width, height) of an image, given an open file or a path. Set
+ 'close' to True to close the file at the end if it is initially in an open
+ state.
+ """
+ # Try to import PIL in either of the two ways it can end up installed.
+ try:
+ from PIL import ImageFile as PILImageFile
+ except ImportError:
+ import ImageFile as PILImageFile
+
+ p = PILImageFile.Parser()
+ if hasattr(file_or_path, 'read'):
+ file = file_or_path
+ file_pos = file.tell()
+ file.seek(0)
+ else:
+ file = open(file_or_path, 'rb')
+ close = True
+ try:
+ while 1:
+ data = file.read(1024)
+ if not data:
+ break
+ p.feed(data)
+ if p.image:
+ return p.image.size
+ return None
+ finally:
+ if close:
+ file.close()
+ else:
+ file.seek(file_pos)
diff --git a/google/appengine/_internal/django/core/files/locks.py b/google/appengine/_internal/django/core/files/locks.py
new file mode 100644
index 0000000..d235bfd
--- /dev/null
+++ b/google/appengine/_internal/django/core/files/locks.py
@@ -0,0 +1,70 @@
+"""
+Portable file locking utilities.
+
+Based partially on example by Jonathan Feignberg <jdf@pobox.com> in the Python
+Cookbook, licensed under the Python Software License.
+
+ http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65203
+
+Example Usage::
+
+ >>> from google.appengine._internal.django.core.files import locks
+ >>> f = open('./file', 'wb')
+ >>> locks.lock(f, locks.LOCK_EX)
+ >>> f.write('Django')
+ >>> f.close()
+"""
+
+__all__ = ('LOCK_EX','LOCK_SH','LOCK_NB','lock','unlock')
+
+system_type = None
+
+try:
+ import win32con
+ import win32file
+ import pywintypes
+ LOCK_EX = win32con.LOCKFILE_EXCLUSIVE_LOCK
+ LOCK_SH = 0
+ LOCK_NB = win32con.LOCKFILE_FAIL_IMMEDIATELY
+ __overlapped = pywintypes.OVERLAPPED()
+ system_type = 'nt'
+except (ImportError, AttributeError):
+ pass
+
+try:
+ import fcntl
+ LOCK_EX = fcntl.LOCK_EX
+ LOCK_SH = fcntl.LOCK_SH
+ LOCK_NB = fcntl.LOCK_NB
+ system_type = 'posix'
+except (ImportError, AttributeError):
+ pass
+
+def fd(f):
+ """Get a filedescriptor from something which could be a file or an fd."""
+ return hasattr(f, 'fileno') and f.fileno() or f
+
+if system_type == 'nt':
+ def lock(file, flags):
+ hfile = win32file._get_osfhandle(fd(file))
+ win32file.LockFileEx(hfile, flags, 0, -0x10000, __overlapped)
+
+ def unlock(file):
+ hfile = win32file._get_osfhandle(fd(file))
+ win32file.UnlockFileEx(hfile, 0, -0x10000, __overlapped)
+elif system_type == 'posix':
+ def lock(file, flags):
+ fcntl.lockf(fd(file), flags)
+
+ def unlock(file):
+ fcntl.lockf(fd(file), fcntl.LOCK_UN)
+else:
+ # File locking is not supported.
+ LOCK_EX = LOCK_SH = LOCK_NB = None
+
+ # Dummy functions that don't do anything.
+ def lock(file, flags):
+ pass
+
+ def unlock(file):
+ pass
diff --git a/google/appengine/_internal/django/core/files/move.py b/google/appengine/_internal/django/core/files/move.py
new file mode 100644
index 0000000..8e24619
--- /dev/null
+++ b/google/appengine/_internal/django/core/files/move.py
@@ -0,0 +1,88 @@
+"""
+Move a file in the safest way possible::
+
+ >>> from google.appengine._internal.django.core.files.move import file_move_safe
+ >>> file_move_safe("/tmp/old_file", "/tmp/new_file")
+"""
+
+import os
+from google.appengine._internal.django.core.files import locks
+
+try:
+ from shutil import copystat
+except ImportError:
+ import stat
+ def copystat(src, dst):
+ """Copy all stat info (mode bits, atime and mtime) from src to dst"""
+ st = os.stat(src)
+ mode = stat.S_IMODE(st.st_mode)
+ if hasattr(os, 'utime'):
+ os.utime(dst, (st.st_atime, st.st_mtime))
+ if hasattr(os, 'chmod'):
+ os.chmod(dst, mode)
+
+__all__ = ['file_move_safe']
+
+def _samefile(src, dst):
+ # Macintosh, Unix.
+ if hasattr(os.path,'samefile'):
+ try:
+ return os.path.samefile(src, dst)
+ except OSError:
+ return False
+
+ # All other platforms: check for same pathname.
+ return (os.path.normcase(os.path.abspath(src)) ==
+ os.path.normcase(os.path.abspath(dst)))
+
+def file_move_safe(old_file_name, new_file_name, chunk_size = 1024*64, allow_overwrite=False):
+ """
+ Moves a file from one location to another in the safest way possible.
+
+ First, tries ``os.rename``, which is simple but will break across filesystems.
+ If that fails, streams manually from one file to another in pure Python.
+
+ If the destination file exists and ``allow_overwrite`` is ``False``, this
+ function will throw an ``IOError``.
+ """
+
+ # There's no reason to move if we don't have to.
+ if _samefile(old_file_name, new_file_name):
+ return
+
+ try:
+ os.rename(old_file_name, new_file_name)
+ return
+ except OSError:
+ # This will happen with os.rename if moving to another filesystem
+ # or when moving opened files on certain operating systems
+ pass
+
+ # first open the old file, so that it won't go away
+ old_file = open(old_file_name, 'rb')
+ try:
+ # now open the new file, not forgetting allow_overwrite
+ fd = os.open(new_file_name, os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) |
+ (not allow_overwrite and os.O_EXCL or 0))
+ try:
+ locks.lock(fd, locks.LOCK_EX)
+ current_chunk = None
+ while current_chunk != '':
+ current_chunk = old_file.read(chunk_size)
+ os.write(fd, current_chunk)
+ finally:
+ locks.unlock(fd)
+ os.close(fd)
+ finally:
+ old_file.close()
+ copystat(old_file_name, new_file_name)
+
+ try:
+ os.remove(old_file_name)
+ except OSError, e:
+ # Certain operating systems (Cygwin and Windows)
+ # fail when deleting opened files, ignore it. (For the
+ # systems where this happens, temporary files will be auto-deleted
+ # on close anyway.)
+ if getattr(e, 'winerror', 0) != 32 and getattr(e, 'errno', 0) != 13:
+ raise
diff --git a/google/appengine/_internal/django/core/files/storage.py b/google/appengine/_internal/django/core/files/storage.py
new file mode 100644
index 0000000..c9d995f
--- /dev/null
+++ b/google/appengine/_internal/django/core/files/storage.py
@@ -0,0 +1,244 @@
+import os
+import errno
+import urlparse
+import itertools
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
+from google.appengine._internal.django.core.files import locks, File
+from google.appengine._internal.django.core.files.move import file_move_safe
+from google.appengine._internal.django.utils.encoding import force_unicode, filepath_to_uri
+from google.appengine._internal.django.utils.functional import LazyObject
+from google.appengine._internal.django.utils.importlib import import_module
+from google.appengine._internal.django.utils.text import get_valid_filename
+from google.appengine._internal.django.utils._os import safe_join
+
+__all__ = ('Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage')
+
+class Storage(object):
+ """
+ A base storage class, providing some default behaviors that all other
+ storage systems can inherit or override, as necessary.
+ """
+
+ # The following methods represent a public interface to private methods.
+ # These shouldn't be overridden by subclasses unless absolutely necessary.
+
+ def open(self, name, mode='rb', mixin=None):
+ """
+ Retrieves the specified file from storage, using the optional mixin
+ class to customize what features are available on the File returned.
+ """
+ file = self._open(name, mode)
+ if mixin:
+ # Add the mixin as a parent class of the File returned from storage.
+ file.__class__ = type(mixin.__name__, (mixin, file.__class__), {})
+ return file
+
+ def save(self, name, content):
+ """
+ Saves new content to the file specified by name. The content should be a
+ proper File object, ready to be read from the beginning.
+ """
+ # Get the proper name for the file, as it will actually be saved.
+ if name is None:
+ name = content.name
+
+ name = self.get_available_name(name)
+ name = self._save(name, content)
+
+ # Store filenames with forward slashes, even on Windows
+ return force_unicode(name.replace('\\', '/'))
+
+ # These methods are part of the public API, with default implementations.
+
+ def get_valid_name(self, name):
+ """
+ Returns a filename, based on the provided filename, that's suitable for
+ use in the target storage system.
+ """
+ return get_valid_filename(name)
+
+ def get_available_name(self, name):
+ """
+ Returns a filename that's free on the target storage system, and
+ available for new content to be written to.
+ """
+ dir_name, file_name = os.path.split(name)
+ file_root, file_ext = os.path.splitext(file_name)
+ # If the filename already exists, add an underscore and a number (before
+ # the file extension, if one exists) to the filename until the generated
+ # filename doesn't exist.
+ count = itertools.count(1)
+ while self.exists(name):
+ # file_ext includes the dot.
+ name = os.path.join(dir_name, "%s_%s%s" % (file_root, count.next(), file_ext))
+
+ return name
+
+ def path(self, name):
+ """
+ Returns a local filesystem path where the file can be retrieved using
+ Python's built-in open() function. Storage systems that can't be
+ accessed using open() should *not* implement this method.
+ """
+ raise NotImplementedError("This backend doesn't support absolute paths.")
+
+ # The following methods form the public API for storage systems, but with
+ # no default implementations. Subclasses must implement *all* of these.
+
+ def delete(self, name):
+ """
+ Deletes the specified file from the storage system.
+ """
+ raise NotImplementedError()
+
+ def exists(self, name):
+ """
+        Returns True if a file referenced by the given name already exists in the
+ storage system, or False if the name is available for a new file.
+ """
+ raise NotImplementedError()
+
+ def listdir(self, path):
+ """
+ Lists the contents of the specified path, returning a 2-tuple of lists;
+ the first item being directories, the second item being files.
+ """
+ raise NotImplementedError()
+
+ def size(self, name):
+ """
+ Returns the total size, in bytes, of the file specified by name.
+ """
+ raise NotImplementedError()
+
+ def url(self, name):
+ """
+ Returns an absolute URL where the file's contents can be accessed
+ directly by a Web browser.
+ """
+ raise NotImplementedError()
+
+class FileSystemStorage(Storage):
+ """
+ Standard filesystem storage
+ """
+
+ def __init__(self, location=None, base_url=None):
+ if location is None:
+ location = settings.MEDIA_ROOT
+ if base_url is None:
+ base_url = settings.MEDIA_URL
+ self.location = os.path.abspath(location)
+ self.base_url = base_url
+
+ def _open(self, name, mode='rb'):
+ return File(open(self.path(name), mode))
+
+ def _save(self, name, content):
+ full_path = self.path(name)
+
+ directory = os.path.dirname(full_path)
+ if not os.path.exists(directory):
+ os.makedirs(directory)
+ elif not os.path.isdir(directory):
+ raise IOError("%s exists and is not a directory." % directory)
+
+ # There's a potential race condition between get_available_name and
+ # saving the file; it's possible that two threads might return the
+ # same name, at which point all sorts of fun happens. So we need to
+ # try to create the file, but if it already exists we have to go back
+ # to get_available_name() and try again.
+
+ while True:
+ try:
+ # This file has a file path that we can move.
+ if hasattr(content, 'temporary_file_path'):
+ file_move_safe(content.temporary_file_path(), full_path)
+ content.close()
+
+ # This is a normal uploadedfile that we can stream.
+ else:
+ # This fun binary flag incantation makes os.open throw an
+ # OSError if the file already exists before we open it.
+ fd = os.open(full_path, os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, 'O_BINARY', 0))
+ try:
+ locks.lock(fd, locks.LOCK_EX)
+ for chunk in content.chunks():
+ os.write(fd, chunk)
+ finally:
+ locks.unlock(fd)
+ os.close(fd)
+ except OSError, e:
+ if e.errno == errno.EEXIST:
+ # Ooops, the file exists. We need a new file name.
+ name = self.get_available_name(name)
+ full_path = self.path(name)
+ else:
+ raise
+ else:
+ # OK, the file save worked. Break out of the loop.
+ break
+
+ if settings.FILE_UPLOAD_PERMISSIONS is not None:
+ os.chmod(full_path, settings.FILE_UPLOAD_PERMISSIONS)
+
+ return name
+
+ def delete(self, name):
+ name = self.path(name)
+ # If the file exists, delete it from the filesystem.
+ if os.path.exists(name):
+ os.remove(name)
+
+ def exists(self, name):
+ return os.path.exists(self.path(name))
+
+ def listdir(self, path):
+ path = self.path(path)
+ directories, files = [], []
+ for entry in os.listdir(path):
+ if os.path.isdir(os.path.join(path, entry)):
+ directories.append(entry)
+ else:
+ files.append(entry)
+ return directories, files
+
+ def path(self, name):
+ try:
+ path = safe_join(self.location, name)
+ except ValueError:
+ raise SuspiciousOperation("Attempted access to '%s' denied." % name)
+ return os.path.normpath(path)
+
+ def size(self, name):
+ return os.path.getsize(self.path(name))
+
+ def url(self, name):
+ if self.base_url is None:
+ raise ValueError("This file is not accessible via a URL.")
+ return urlparse.urljoin(self.base_url, filepath_to_uri(name))
+
+def get_storage_class(import_path=None):
+ if import_path is None:
+ import_path = settings.DEFAULT_FILE_STORAGE
+ try:
+ dot = import_path.rindex('.')
+ except ValueError:
+ raise ImproperlyConfigured("%s isn't a storage module." % import_path)
+ module, classname = import_path[:dot], import_path[dot+1:]
+ try:
+ mod = import_module(module)
+ except ImportError, e:
+ raise ImproperlyConfigured('Error importing storage module %s: "%s"' % (module, e))
+ try:
+ return getattr(mod, classname)
+ except AttributeError:
+ raise ImproperlyConfigured('Storage module "%s" does not define a "%s" class.' % (module, classname))
+
+class DefaultStorage(LazyObject):
+ def _setup(self):
+ self._wrapped = get_storage_class()()
+
+default_storage = DefaultStorage()
diff --git a/google/appengine/_internal/django/core/files/temp.py b/google/appengine/_internal/django/core/files/temp.py
new file mode 100644
index 0000000..3d61d44
--- /dev/null
+++ b/google/appengine/_internal/django/core/files/temp.py
@@ -0,0 +1,56 @@
+"""
+The temp module provides a NamedTemporaryFile that can be re-opened on any
+platform. Most platforms use the standard Python tempfile.TemporaryFile class,
+but MS Windows users are given a custom class.
+
+This is needed because in Windows NT, the default implementation of
+NamedTemporaryFile uses the O_TEMPORARY flag, and thus cannot be reopened [1].
+
+1: http://mail.python.org/pipermail/python-list/2005-December/359474.html
+"""
+
+import os
+import tempfile
+from google.appengine._internal.django.core.files.utils import FileProxyMixin
+
+__all__ = ('NamedTemporaryFile', 'gettempdir',)
+
+if os.name == 'nt':
+ class TemporaryFile(FileProxyMixin):
+ """
+ Temporary file object constructor that works in Windows and supports
+ reopening of the temporary file in windows.
+ """
+ def __init__(self, mode='w+b', bufsize=-1, suffix='', prefix='',
+ dir=None):
+ fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
+ dir=dir)
+ self.name = name
+ self.file = os.fdopen(fd, mode, bufsize)
+ self.close_called = False
+
+ # Because close can be called during shutdown
+ # we need to cache os.unlink and access it
+ # as self.unlink only
+ unlink = os.unlink
+
+ def close(self):
+ if not self.close_called:
+ self.close_called = True
+ try:
+ self.file.close()
+ except (OSError, IOError):
+ pass
+ try:
+ self.unlink(self.name)
+ except (OSError):
+ pass
+
+ def __del__(self):
+ self.close()
+
+ NamedTemporaryFile = TemporaryFile
+else:
+ NamedTemporaryFile = tempfile.NamedTemporaryFile
+
+gettempdir = tempfile.gettempdir
diff --git a/google/appengine/_internal/django/core/files/uploadedfile.py b/google/appengine/_internal/django/core/files/uploadedfile.py
new file mode 100644
index 0000000..0661c7e
--- /dev/null
+++ b/google/appengine/_internal/django/core/files/uploadedfile.py
@@ -0,0 +1,128 @@
+"""
+Classes representing uploaded files.
+"""
+
+import os
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.files.base import File
+from google.appengine._internal.django.core.files import temp as tempfile
+from google.appengine._internal.django.utils.encoding import smart_str
+
+__all__ = ('UploadedFile', 'TemporaryUploadedFile', 'InMemoryUploadedFile',
+ 'SimpleUploadedFile')
+
+class UploadedFile(File):
+ """
+    An abstract uploaded file (``TemporaryUploadedFile`` and
+ ``InMemoryUploadedFile`` are the built-in concrete subclasses).
+
+ An ``UploadedFile`` object behaves somewhat like a file object and
+ represents some file data that the user submitted with a form.
+ """
+ DEFAULT_CHUNK_SIZE = 64 * 2**10
+
+ def __init__(self, file=None, name=None, content_type=None, size=None, charset=None):
+ super(UploadedFile, self).__init__(file, name)
+ self.size = size
+ self.content_type = content_type
+ self.charset = charset
+
+ def __repr__(self):
+ return "<%s: %s (%s)>" % (
+ self.__class__.__name__, smart_str(self.name), self.content_type)
+
+ def _get_name(self):
+ return self._name
+
+ def _set_name(self, name):
+ # Sanitize the file name so that it can't be dangerous.
+ if name is not None:
+ # Just use the basename of the file -- anything else is dangerous.
+ name = os.path.basename(name)
+
+ # File names longer than 255 characters can cause problems on older OSes.
+ if len(name) > 255:
+ name, ext = os.path.splitext(name)
+ name = name[:255 - len(ext)] + ext
+
+ self._name = name
+
+ name = property(_get_name, _set_name)
+
+class TemporaryUploadedFile(UploadedFile):
+ """
+ A file uploaded to a temporary location (i.e. stream-to-disk).
+ """
+ def __init__(self, name, content_type, size, charset):
+ if settings.FILE_UPLOAD_TEMP_DIR:
+ file = tempfile.NamedTemporaryFile(suffix='.upload',
+ dir=settings.FILE_UPLOAD_TEMP_DIR)
+ else:
+ file = tempfile.NamedTemporaryFile(suffix='.upload')
+ super(TemporaryUploadedFile, self).__init__(file, name, content_type, size, charset)
+
+ def temporary_file_path(self):
+ """
+ Returns the full path of this file.
+ """
+ return self.file.name
+
+ def close(self):
+ try:
+ return self.file.close()
+ except OSError, e:
+ if e.errno != 2:
+ # Means the file was moved or deleted before the tempfile
+ # could unlink it. Still sets self.file.close_called and
+ # calls self.file.file.close() before the exception
+ raise
+
+class InMemoryUploadedFile(UploadedFile):
+ """
+ A file uploaded into memory (i.e. stream-to-memory).
+ """
+ def __init__(self, file, field_name, name, content_type, size, charset):
+ super(InMemoryUploadedFile, self).__init__(file, name, content_type, size, charset)
+ self.field_name = field_name
+
+ def open(self, mode=None):
+ self.file.seek(0)
+
+ def close(self):
+ pass
+
+ def chunks(self, chunk_size=None):
+ self.file.seek(0)
+ yield self.read()
+
+ def multiple_chunks(self, chunk_size=None):
+ # Since it's in memory, we'll never have multiple chunks.
+ return False
+
+
+class SimpleUploadedFile(InMemoryUploadedFile):
+ """
+ A simple representation of a file, which just has content, size, and a name.
+ """
+ def __init__(self, name, content, content_type='text/plain'):
+ content = content or ''
+ super(SimpleUploadedFile, self).__init__(StringIO(content), None, name,
+ content_type, len(content), None)
+
+ def from_dict(cls, file_dict):
+ """
+ Creates a SimpleUploadedFile object from
+ a dictionary object with the following keys:
+ - filename
+ - content-type
+ - content
+ """
+ return cls(file_dict['filename'],
+ file_dict['content'],
+ file_dict.get('content-type', 'text/plain'))
+ from_dict = classmethod(from_dict)
diff --git a/google/appengine/_internal/django/core/files/uploadhandler.py b/google/appengine/_internal/django/core/files/uploadhandler.py
new file mode 100644
index 0000000..c97497f
--- /dev/null
+++ b/google/appengine/_internal/django/core/files/uploadhandler.py
@@ -0,0 +1,215 @@
+"""
+Base file upload handler classes, and the built-in concrete subclasses
+"""
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.exceptions import ImproperlyConfigured
+from google.appengine._internal.django.core.files.uploadedfile import TemporaryUploadedFile, InMemoryUploadedFile
+from google.appengine._internal.django.utils import importlib
+
+__all__ = ['UploadFileException','StopUpload', 'SkipFile', 'FileUploadHandler',
+ 'TemporaryFileUploadHandler', 'MemoryFileUploadHandler',
+ 'load_handler', 'StopFutureHandlers']
+
+class UploadFileException(Exception):
+ """
+ Any error having to do with uploading files.
+ """
+ pass
+
+class StopUpload(UploadFileException):
+ """
+ This exception is raised when an upload must abort.
+ """
+ def __init__(self, connection_reset=False):
+ """
+        If ``connection_reset`` is ``True``, Django will halt the upload
+ without consuming the rest of the upload. This will cause the browser to
+ show a "connection reset" error.
+ """
+ self.connection_reset = connection_reset
+
+ def __unicode__(self):
+ if self.connection_reset:
+ return u'StopUpload: Halt current upload.'
+ else:
+ return u'StopUpload: Consume request data, then halt.'
+
+class SkipFile(UploadFileException):
+ """
+ This exception is raised by an upload handler that wants to skip a given file.
+ """
+ pass
+
+class StopFutureHandlers(UploadFileException):
+ """
+    Upload handlers that have handled a file and do not want future handlers to
+ run should raise this exception instead of returning None.
+ """
+ pass
+
+class FileUploadHandler(object):
+ """
+ Base class for streaming upload handlers.
+ """
+ chunk_size = 64 * 2 ** 10 #: The default chunk size is 64 KB.
+
+ def __init__(self, request=None):
+ self.file_name = None
+ self.content_type = None
+ self.content_length = None
+ self.charset = None
+ self.request = request
+
+ def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
+ """
+ Handle the raw input from the client.
+
+ Parameters:
+
+ :input_data:
+ An object that supports reading via .read().
+ :META:
+ ``request.META``.
+ :content_length:
+ The (integer) value of the Content-Length header from the
+ client.
+ :boundary: The boundary from the Content-Type header. Be sure to
+ prepend two '--'.
+ """
+ pass
+
+ def new_file(self, field_name, file_name, content_type, content_length, charset=None):
+ """
+ Signal that a new file has been started.
+
+ Warning: As with any data from the client, you should not trust
+ content_length (and sometimes won't even get it).
+ """
+ self.field_name = field_name
+ self.file_name = file_name
+ self.content_type = content_type
+ self.content_length = content_length
+ self.charset = charset
+
+ def receive_data_chunk(self, raw_data, start):
+ """
+ Receive data from the streamed upload parser. ``start`` is the position
+ in the file of the chunk.
+ """
+ raise NotImplementedError()
+
+ def file_complete(self, file_size):
+ """
+ Signal that a file has completed. File size corresponds to the actual
+ size accumulated by all the chunks.
+
+ Subclasses should return a valid ``UploadedFile`` object.
+ """
+ raise NotImplementedError()
+
+ def upload_complete(self):
+ """
+ Signal that the upload is complete. Subclasses should perform cleanup
+ that is necessary for this handler.
+ """
+ pass
+
+class TemporaryFileUploadHandler(FileUploadHandler):
+ """
+ Upload handler that streams data into a temporary file.
+ """
+ def __init__(self, *args, **kwargs):
+ super(TemporaryFileUploadHandler, self).__init__(*args, **kwargs)
+
+ def new_file(self, file_name, *args, **kwargs):
+ """
+ Create the file object to append to as data is coming in.
+ """
+ super(TemporaryFileUploadHandler, self).new_file(file_name, *args, **kwargs)
+ self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset)
+
+ def receive_data_chunk(self, raw_data, start):
+ self.file.write(raw_data)
+
+ def file_complete(self, file_size):
+ self.file.seek(0)
+ self.file.size = file_size
+ return self.file
+
+class MemoryFileUploadHandler(FileUploadHandler):
+ """
+ File upload handler to stream uploads into memory (used for small files).
+ """
+
+ def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
+ """
+ Use the content_length to signal whether or not this handler should be in use.
+ """
+        # Check the content-length header to decide whether to use this handler.
+ # If the post is too large, we cannot use the Memory handler.
+ if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
+ self.activated = False
+ else:
+ self.activated = True
+
+ def new_file(self, *args, **kwargs):
+ super(MemoryFileUploadHandler, self).new_file(*args, **kwargs)
+ if self.activated:
+ self.file = StringIO()
+ raise StopFutureHandlers()
+
+ def receive_data_chunk(self, raw_data, start):
+ """
+ Add the data to the StringIO file.
+ """
+ if self.activated:
+ self.file.write(raw_data)
+ else:
+ return raw_data
+
+ def file_complete(self, file_size):
+ """
+ Return a file object if we're activated.
+ """
+ if not self.activated:
+ return
+
+ self.file.seek(0)
+ return InMemoryUploadedFile(
+ file = self.file,
+ field_name = self.field_name,
+ name = self.file_name,
+ content_type = self.content_type,
+ size = file_size,
+ charset = self.charset
+ )
+
+
+def load_handler(path, *args, **kwargs):
+ """
+ Given a path to a handler, return an instance of that handler.
+
+ E.g.::
+ >>> load_handler('django.core.files.uploadhandler.TemporaryFileUploadHandler', request)
+ <TemporaryFileUploadHandler object at 0x...>
+
+ """
+ i = path.rfind('.')
+ module, attr = path[:i], path[i+1:]
+ try:
+ mod = importlib.import_module(module)
+ except ImportError, e:
+ raise ImproperlyConfigured('Error importing upload handler module %s: "%s"' % (module, e))
+ except ValueError, e:
+ raise ImproperlyConfigured('Error importing upload handler module. Is FILE_UPLOAD_HANDLERS a correctly defined list or tuple?')
+ try:
+ cls = getattr(mod, attr)
+ except AttributeError:
+ raise ImproperlyConfigured('Module "%s" does not define a "%s" upload handler backend' % (module, attr))
+ return cls(*args, **kwargs)
diff --git a/google/appengine/_internal/django/core/files/utils.py b/google/appengine/_internal/django/core/files/utils.py
new file mode 100644
index 0000000..8cc212f
--- /dev/null
+++ b/google/appengine/_internal/django/core/files/utils.py
@@ -0,0 +1,29 @@
+class FileProxyMixin(object):
+ """
+    A mixin class used to forward file methods to an underlying file
+ object. The internal file object has to be called "file"::
+
+ class FileProxy(FileProxyMixin):
+ def __init__(self, file):
+ self.file = file
+ """
+
+ encoding = property(lambda self: self.file.encoding)
+ fileno = property(lambda self: self.file.fileno)
+ flush = property(lambda self: self.file.flush)
+ isatty = property(lambda self: self.file.isatty)
+ newlines = property(lambda self: self.file.newlines)
+ read = property(lambda self: self.file.read)
+ readinto = property(lambda self: self.file.readinto)
+ readline = property(lambda self: self.file.readline)
+ readlines = property(lambda self: self.file.readlines)
+ seek = property(lambda self: self.file.seek)
+ softspace = property(lambda self: self.file.softspace)
+ tell = property(lambda self: self.file.tell)
+ truncate = property(lambda self: self.file.truncate)
+ write = property(lambda self: self.file.write)
+ writelines = property(lambda self: self.file.writelines)
+ xreadlines = property(lambda self: self.file.xreadlines)
+
+ def __iter__(self):
+ return iter(self.file)
diff --git a/google/appengine/_internal/django/core/handlers/__init__.py b/google/appengine/_internal/django/core/handlers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/google/appengine/_internal/django/core/handlers/__init__.py
diff --git a/google/appengine/_internal/django/core/handlers/base.py b/google/appengine/_internal/django/core/handlers/base.py
new file mode 100644
index 0000000..cd47a11
--- /dev/null
+++ b/google/appengine/_internal/django/core/handlers/base.py
@@ -0,0 +1,220 @@
+import sys
+
+from google.appengine._internal.django import http
+from google.appengine._internal.django.core import signals
+from google.appengine._internal.django.utils.encoding import force_unicode
+from google.appengine._internal.django.utils.importlib import import_module
+
+class BaseHandler(object):
+ # Changes that are always applied to a response (in this order).
+ response_fixes = [
+ http.fix_location_header,
+ http.conditional_content_removal,
+ http.fix_IE_for_attach,
+ http.fix_IE_for_vary,
+ ]
+
+ def __init__(self):
+ self._request_middleware = self._view_middleware = self._response_middleware = self._exception_middleware = None
+
+ def load_middleware(self):
+ """
+ Populate middleware lists from settings.MIDDLEWARE_CLASSES.
+
+ Must be called after the environment is fixed (see __call__).
+ """
+ from google.appengine._internal.django.conf import settings
+ from google.appengine._internal.django.core import exceptions
+ self._view_middleware = []
+ self._response_middleware = []
+ self._exception_middleware = []
+
+ request_middleware = []
+ for middleware_path in settings.MIDDLEWARE_CLASSES:
+ try:
+ dot = middleware_path.rindex('.')
+ except ValueError:
+ raise exceptions.ImproperlyConfigured('%s isn\'t a middleware module' % middleware_path)
+ mw_module, mw_classname = middleware_path[:dot], middleware_path[dot+1:]
+ try:
+ mod = import_module(mw_module)
+ except ImportError, e:
+ raise exceptions.ImproperlyConfigured('Error importing middleware %s: "%s"' % (mw_module, e))
+ try:
+ mw_class = getattr(mod, mw_classname)
+ except AttributeError:
+ raise exceptions.ImproperlyConfigured('Middleware module "%s" does not define a "%s" class' % (mw_module, mw_classname))
+
+ try:
+ mw_instance = mw_class()
+ except exceptions.MiddlewareNotUsed:
+ continue
+
+ if hasattr(mw_instance, 'process_request'):
+ request_middleware.append(mw_instance.process_request)
+ if hasattr(mw_instance, 'process_view'):
+ self._view_middleware.append(mw_instance.process_view)
+ if hasattr(mw_instance, 'process_response'):
+ self._response_middleware.insert(0, mw_instance.process_response)
+ if hasattr(mw_instance, 'process_exception'):
+ self._exception_middleware.insert(0, mw_instance.process_exception)
+
+ # We only assign to this when initialization is complete as it is used
+ # as a flag for initialization being complete.
+ self._request_middleware = request_middleware
+
+ def get_response(self, request):
+ "Returns an HttpResponse object for the given HttpRequest"
+ from google.appengine._internal.django.core import exceptions, urlresolvers
+ from google.appengine._internal.django.conf import settings
+
+ try:
+ try:
+ # Setup default url resolver for this thread.
+ urlconf = settings.ROOT_URLCONF
+ urlresolvers.set_urlconf(urlconf)
+ resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
+
+ # Apply request middleware
+ for middleware_method in self._request_middleware:
+ response = middleware_method(request)
+ if response:
+ return response
+
+ if hasattr(request, "urlconf"):
+ # Reset url resolver with a custom urlconf.
+ urlconf = request.urlconf
+ urlresolvers.set_urlconf(urlconf)
+ resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
+
+ callback, callback_args, callback_kwargs = resolver.resolve(
+ request.path_info)
+
+ # Apply view middleware
+ for middleware_method in self._view_middleware:
+ response = middleware_method(request, callback, callback_args, callback_kwargs)
+ if response:
+ return response
+
+ try:
+ response = callback(request, *callback_args, **callback_kwargs)
+ except Exception, e:
+ # If the view raised an exception, run it through exception
+ # middleware, and if the exception middleware returns a
+ # response, use that. Otherwise, reraise the exception.
+ for middleware_method in self._exception_middleware:
+ response = middleware_method(request, e)
+ if response:
+ return response
+ raise
+
+ # Complain if the view returned None (a common error).
+ if response is None:
+ try:
+ view_name = callback.func_name # If it's a function
+ except AttributeError:
+ view_name = callback.__class__.__name__ + '.__call__' # If it's a class
+ raise ValueError("The view %s.%s didn't return an HttpResponse object." % (callback.__module__, view_name))
+
+ return response
+ except http.Http404, e:
+ if settings.DEBUG:
+ from google.appengine._internal.django.views import debug
+ return debug.technical_404_response(request, e)
+ else:
+ try:
+ callback, param_dict = resolver.resolve404()
+ return callback(request, **param_dict)
+ except:
+ try:
+ return self.handle_uncaught_exception(request, resolver, sys.exc_info())
+ finally:
+ receivers = signals.got_request_exception.send(sender=self.__class__, request=request)
+ except exceptions.PermissionDenied:
+ return http.HttpResponseForbidden('<h1>Permission denied</h1>')
+ except SystemExit:
+ # Allow sys.exit() to actually exit. See tickets #1023 and #4701
+ raise
+ except: # Handle everything else, including SuspiciousOperation, etc.
+ # Get the exception info now, in case another exception is thrown later.
+ receivers = signals.got_request_exception.send(sender=self.__class__, request=request)
+ return self.handle_uncaught_exception(request, resolver, sys.exc_info())
+ finally:
+ # Reset URLconf for this thread on the way out for complete
+ # isolation of request.urlconf
+ urlresolvers.set_urlconf(None)
+
+ def handle_uncaught_exception(self, request, resolver, exc_info):
+ """
+ Processing for any otherwise uncaught exceptions (those that will
+ generate HTTP 500 responses). Can be overridden by subclasses who want
+ customised 500 handling.
+
+ Be *very* careful when overriding this because the error could be
+ caused by anything, so assuming something like the database is always
+ available would be an error.
+ """
+ from google.appengine._internal.django.conf import settings
+ from google.appengine._internal.django.core.mail import mail_admins
+
+ if settings.DEBUG_PROPAGATE_EXCEPTIONS:
+ raise
+
+ if settings.DEBUG:
+ from google.appengine._internal.django.views import debug
+ return debug.technical_500_response(request, *exc_info)
+
+ # When DEBUG is False, send an error message to the admins.
+ subject = 'Error (%s IP): %s' % ((request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS and 'internal' or 'EXTERNAL'), request.path)
+ try:
+ request_repr = repr(request)
+ except:
+ request_repr = "Request repr() unavailable"
+ message = "%s\n\n%s" % (self._get_traceback(exc_info), request_repr)
+ mail_admins(subject, message, fail_silently=True)
+ # If Http500 handler is not installed, re-raise last exception
+ if resolver.urlconf_module is None:
+ raise exc_info[1], None, exc_info[2]
+ # Return an HttpResponse that displays a friendly error message.
+ callback, param_dict = resolver.resolve500()
+ return callback(request, **param_dict)
+
+ def _get_traceback(self, exc_info=None):
+ "Helper function to return the traceback as a string"
+ import traceback
+ return '\n'.join(traceback.format_exception(*(exc_info or sys.exc_info())))
+
+ def apply_response_fixes(self, request, response):
+ """
+ Applies each of the functions in self.response_fixes to the request and
+ response, modifying the response in the process. Returns the new
+ response.
+ """
+ for func in self.response_fixes:
+ response = func(request, response)
+ return response
+
+def get_script_name(environ):
+ """
+ Returns the equivalent of the HTTP request's SCRIPT_NAME environment
+ variable. If Apache mod_rewrite has been used, returns what would have been
+ the script name prior to any rewriting (so it's the script name as seen
+ from the client's perspective), unless DJANGO_USE_POST_REWRITE is set (to
+ anything).
+ """
+ from google.appengine._internal.django.conf import settings
+ if settings.FORCE_SCRIPT_NAME is not None:
+ return force_unicode(settings.FORCE_SCRIPT_NAME)
+
+ # If Apache's mod_rewrite had a whack at the URL, Apache set either
+ # SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any
+ # rewrites. Unfortunately not every Web server (lighttpd!) passes this
+ # information through all the time, so FORCE_SCRIPT_NAME, above, is still
+ # needed.
+ script_url = environ.get('SCRIPT_URL', u'')
+ if not script_url:
+ script_url = environ.get('REDIRECT_URL', u'')
+ if script_url:
+ return force_unicode(script_url[:-len(environ.get('PATH_INFO', ''))])
+ return force_unicode(environ.get('SCRIPT_NAME', u''))
+
diff --git a/google/appengine/_internal/django/core/handlers/modpython.py b/google/appengine/_internal/django/core/handlers/modpython.py
new file mode 100644
index 0000000..9e0d3ab
--- /dev/null
+++ b/google/appengine/_internal/django/core/handlers/modpython.py
@@ -0,0 +1,228 @@
+import os
+from pprint import pformat
+
+from google.appengine._internal.django import http
+from google.appengine._internal.django.core import signals
+from google.appengine._internal.django.core.handlers.base import BaseHandler
+from google.appengine._internal.django.core.urlresolvers import set_script_prefix
+from google.appengine._internal.django.utils import datastructures
+from google.appengine._internal.django.utils.encoding import force_unicode, smart_str, iri_to_uri
+
+# NOTE: do *not* import settings (or any module which eventually imports
+# settings) until after ModPythonHandler has been called; otherwise os.environ
+# won't be set up correctly (with respect to settings).
+
+class ModPythonRequest(http.HttpRequest):
+ def __init__(self, req):
+ self._req = req
+ # FIXME: This isn't ideal. The request URI may be encoded (it's
+ # non-normalized) slightly differently to the "real" SCRIPT_NAME
+ # and PATH_INFO values. This causes problems when we compute path_info,
+ # below. For now, don't use script names that will be subject to
+ # encoding/decoding.
+ self.path = force_unicode(req.uri)
+ root = req.get_options().get('django.root', '')
+ self.django_root = root
+ # req.path_info isn't necessarily computed correctly in all
+ # circumstances (it's out of mod_python's control a bit), so we use
+ # req.uri and some string manipulations to get the right value.
+ if root and req.uri.startswith(root):
+ self.path_info = force_unicode(req.uri[len(root):])
+ else:
+ self.path_info = self.path
+ if not self.path_info:
+ # Django prefers empty paths to be '/', rather than '', to give us
+ # a common start character for URL patterns. So this is a little
+ # naughty, but also pretty harmless.
+ self.path_info = u'/'
+ self._post_parse_error = False
+
+ def __repr__(self):
+ # Since this is called as part of error handling, we need to be very
+ # robust against potentially malformed input.
+ try:
+ get = pformat(self.GET)
+ except:
+ get = '<could not parse>'
+ if self._post_parse_error:
+ post = '<could not parse>'
+ else:
+ try:
+ post = pformat(self.POST)
+ except:
+ post = '<could not parse>'
+ try:
+ cookies = pformat(self.COOKIES)
+ except:
+ cookies = '<could not parse>'
+ try:
+ meta = pformat(self.META)
+ except:
+ meta = '<could not parse>'
+ return smart_str(u'<ModPythonRequest\npath:%s,\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' %
+ (self.path, unicode(get), unicode(post),
+ unicode(cookies), unicode(meta)))
+
+ def get_full_path(self):
+ # RFC 3986 requires self._req.args to be in the ASCII range, but this
+ # doesn't always happen, so rather than crash, we defensively encode it.
+ return '%s%s' % (self.path, self._req.args and ('?' + iri_to_uri(self._req.args)) or '')
+
+ def is_secure(self):
+ try:
+ return self._req.is_https()
+ except AttributeError:
+ # mod_python < 3.2.10 doesn't have req.is_https().
+ return self._req.subprocess_env.get('HTTPS', '').lower() in ('on', '1')
+
+ def _load_post_and_files(self):
+ "Populates self._post and self._files"
+ if self.method != 'POST':
+ self._post, self._files = http.QueryDict('', encoding=self._encoding), datastructures.MultiValueDict()
+ return
+
+ if 'content-type' in self._req.headers_in and self._req.headers_in['content-type'].startswith('multipart'):
+ self._raw_post_data = ''
+ try:
+ self._post, self._files = self.parse_file_upload(self.META, self._req)
+ except:
+ # See django.core.handlers.wsgi.WSGIHandler for an explanation
+ # of what's going on here.
+ self._post = http.QueryDict('')
+ self._files = datastructures.MultiValueDict()
+ self._post_parse_error = True
+ raise
+ else:
+ self._post, self._files = http.QueryDict(self.raw_post_data, encoding=self._encoding), datastructures.MultiValueDict()
+
+ def _get_request(self):
+ if not hasattr(self, '_request'):
+ self._request = datastructures.MergeDict(self.POST, self.GET)
+ return self._request
+
+ def _get_get(self):
+ if not hasattr(self, '_get'):
+ self._get = http.QueryDict(self._req.args, encoding=self._encoding)
+ return self._get
+
+ def _set_get(self, get):
+ self._get = get
+
+ def _get_post(self):
+ if not hasattr(self, '_post'):
+ self._load_post_and_files()
+ return self._post
+
+ def _set_post(self, post):
+ self._post = post
+
+ def _get_cookies(self):
+ if not hasattr(self, '_cookies'):
+ self._cookies = http.parse_cookie(self._req.headers_in.get('cookie', ''))
+ return self._cookies
+
+ def _set_cookies(self, cookies):
+ self._cookies = cookies
+
+ def _get_files(self):
+ if not hasattr(self, '_files'):
+ self._load_post_and_files()
+ return self._files
+
+ def _get_meta(self):
+ "Lazy loader that returns self.META dictionary"
+ if not hasattr(self, '_meta'):
+ self._meta = {
+ 'AUTH_TYPE': self._req.ap_auth_type,
+ 'CONTENT_LENGTH': self._req.headers_in.get('content-length', 0),
+ 'CONTENT_TYPE': self._req.headers_in.get('content-type'),
+ 'GATEWAY_INTERFACE': 'CGI/1.1',
+ 'PATH_INFO': self.path_info,
+ 'PATH_TRANSLATED': None, # Not supported
+ 'QUERY_STRING': self._req.args,
+ 'REMOTE_ADDR': self._req.connection.remote_ip,
+ 'REMOTE_HOST': None, # DNS lookups not supported
+ 'REMOTE_IDENT': self._req.connection.remote_logname,
+ 'REMOTE_USER': self._req.user,
+ 'REQUEST_METHOD': self._req.method,
+ 'SCRIPT_NAME': self.django_root,
+ 'SERVER_NAME': self._req.server.server_hostname,
+ 'SERVER_PORT': self._req.connection.local_addr[1],
+ 'SERVER_PROTOCOL': self._req.protocol,
+ 'SERVER_SOFTWARE': 'mod_python'
+ }
+ for key, value in self._req.headers_in.items():
+ key = 'HTTP_' + key.upper().replace('-', '_')
+ self._meta[key] = value
+ return self._meta
+
+ def _get_raw_post_data(self):
+ try:
+ return self._raw_post_data
+ except AttributeError:
+ self._raw_post_data = self._req.read()
+ return self._raw_post_data
+
+ def _get_method(self):
+ return self.META['REQUEST_METHOD'].upper()
+
+ GET = property(_get_get, _set_get)
+ POST = property(_get_post, _set_post)
+ COOKIES = property(_get_cookies, _set_cookies)
+ FILES = property(_get_files)
+ META = property(_get_meta)
+ REQUEST = property(_get_request)
+ raw_post_data = property(_get_raw_post_data)
+ method = property(_get_method)
+
+class ModPythonHandler(BaseHandler):
+ request_class = ModPythonRequest
+
+ def __call__(self, req):
+ # mod_python fakes the environ, and thus doesn't process SetEnv. This fixes that
+ os.environ.update(req.subprocess_env)
+
+ # now that the environ works we can see the correct settings, so imports
+ # that use settings now can work
+ from google.appengine._internal.django.conf import settings
+
+ # if we need to set up middleware, now that settings works we can do it.
+ if self._request_middleware is None:
+ self.load_middleware()
+
+ set_script_prefix(req.get_options().get('django.root', ''))
+ signals.request_started.send(sender=self.__class__)
+ try:
+ try:
+ request = self.request_class(req)
+ except UnicodeDecodeError:
+ response = http.HttpResponseBadRequest()
+ else:
+ response = self.get_response(request)
+
+ # Apply response middleware
+ for middleware_method in self._response_middleware:
+ response = middleware_method(request, response)
+ response = self.apply_response_fixes(request, response)
+ finally:
+ signals.request_finished.send(sender=self.__class__)
+
+ # Convert our custom HttpResponse object back into the mod_python req.
+ req.content_type = response['Content-Type']
+ for key, value in response.items():
+ if key != 'content-type':
+ req.headers_out[str(key)] = str(value)
+ for c in response.cookies.values():
+ req.headers_out.add('Set-Cookie', c.output(header=''))
+ req.status = response.status_code
+ try:
+ for chunk in response:
+ req.write(chunk)
+ finally:
+ response.close()
+
+ return 0 # mod_python.apache.OK
+
+def handler(req):
+ # mod_python hooks into this function.
+ return ModPythonHandler()(req)
diff --git a/google/appengine/_internal/django/core/handlers/profiler-hotshot.py b/google/appengine/_internal/django/core/handlers/profiler-hotshot.py
new file mode 100644
index 0000000..a490f0c
--- /dev/null
+++ b/google/appengine/_internal/django/core/handlers/profiler-hotshot.py
@@ -0,0 +1,22 @@
+import hotshot, time, os
+from google.appengine._internal.django.core.handlers.modpython import ModPythonHandler
+
+PROFILE_DATA_DIR = "/var/log/cmsprofile"
+
+def handler(req):
+ '''
+ Handler that uses hotshot to store profile data.
+
+ Stores profile data in PROFILE_DATA_DIR. Since hotshot has no way (that I
+ know of) to append profile data to a single file, each request gets its own
+ profile. The file names are in the format <url>.<n>.prof where <url> is
+ the request path with "/" replaced by ".", and <n> is a timestamp with
+ microseconds to prevent overwriting files.
+
+ Use the gather_profile_stats.py script to gather these individual request
+ profiles into aggregated profiles by request path.
+ '''
+ profname = "%s.%.3f.prof" % (req.uri.strip("/").replace('/', '.'), time.time())
+ profname = os.path.join(PROFILE_DATA_DIR, profname)
+ prof = hotshot.Profile(profname)
+ return prof.runcall(ModPythonHandler(), req)
diff --git a/google/appengine/_internal/django/core/handlers/wsgi.py b/google/appengine/_internal/django/core/handlers/wsgi.py
new file mode 100644
index 0000000..18f7f22
--- /dev/null
+++ b/google/appengine/_internal/django/core/handlers/wsgi.py
@@ -0,0 +1,265 @@
+from threading import Lock
+from pprint import pformat
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+from google.appengine._internal.django import http
+from google.appengine._internal.django.core import signals
+from google.appengine._internal.django.core.handlers import base
+from google.appengine._internal.django.core.urlresolvers import set_script_prefix
+from google.appengine._internal.django.utils import datastructures
+from google.appengine._internal.django.utils.encoding import force_unicode, iri_to_uri
+
+# See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
+STATUS_CODE_TEXT = {
+ 100: 'CONTINUE',
+ 101: 'SWITCHING PROTOCOLS',
+ 200: 'OK',
+ 201: 'CREATED',
+ 202: 'ACCEPTED',
+ 203: 'NON-AUTHORITATIVE INFORMATION',
+ 204: 'NO CONTENT',
+ 205: 'RESET CONTENT',
+ 206: 'PARTIAL CONTENT',
+ 300: 'MULTIPLE CHOICES',
+ 301: 'MOVED PERMANENTLY',
+ 302: 'FOUND',
+ 303: 'SEE OTHER',
+ 304: 'NOT MODIFIED',
+ 305: 'USE PROXY',
+ 306: 'RESERVED',
+ 307: 'TEMPORARY REDIRECT',
+ 400: 'BAD REQUEST',
+ 401: 'UNAUTHORIZED',
+ 402: 'PAYMENT REQUIRED',
+ 403: 'FORBIDDEN',
+ 404: 'NOT FOUND',
+ 405: 'METHOD NOT ALLOWED',
+ 406: 'NOT ACCEPTABLE',
+ 407: 'PROXY AUTHENTICATION REQUIRED',
+ 408: 'REQUEST TIMEOUT',
+ 409: 'CONFLICT',
+ 410: 'GONE',
+ 411: 'LENGTH REQUIRED',
+ 412: 'PRECONDITION FAILED',
+ 413: 'REQUEST ENTITY TOO LARGE',
+ 414: 'REQUEST-URI TOO LONG',
+ 415: 'UNSUPPORTED MEDIA TYPE',
+ 416: 'REQUESTED RANGE NOT SATISFIABLE',
+ 417: 'EXPECTATION FAILED',
+ 500: 'INTERNAL SERVER ERROR',
+ 501: 'NOT IMPLEMENTED',
+ 502: 'BAD GATEWAY',
+ 503: 'SERVICE UNAVAILABLE',
+ 504: 'GATEWAY TIMEOUT',
+ 505: 'HTTP VERSION NOT SUPPORTED',
+}
+
+def safe_copyfileobj(fsrc, fdst, length=16*1024, size=0):
+ """
+ A version of shutil.copyfileobj that will not read more than 'size' bytes.
+ This makes it safe from clients sending more than CONTENT_LENGTH bytes of
+ data in the body.
+ """
+ if not size:
+ return
+ while size > 0:
+ buf = fsrc.read(min(length, size))
+ if not buf:
+ break
+ fdst.write(buf)
+ size -= len(buf)
+
+class WSGIRequest(http.HttpRequest):
+ def __init__(self, environ):
+ script_name = base.get_script_name(environ)
+ path_info = force_unicode(environ.get('PATH_INFO', u'/'))
+ if not path_info or path_info == script_name:
+ # Sometimes PATH_INFO exists, but is empty (e.g. accessing
+ # the SCRIPT_NAME URL without a trailing slash). We really need to
+ # operate as if they'd requested '/'. Not amazingly nice to force
+ # the path like this, but should be harmless.
+ #
+ # (The comparison of path_info to script_name is to work around an
+ # apparent bug in flup 1.0.1. See Django ticket #8490).
+ path_info = u'/'
+ self.environ = environ
+ self.path_info = path_info
+ self.path = '%s%s' % (script_name, path_info)
+ self.META = environ
+ self.META['PATH_INFO'] = path_info
+ self.META['SCRIPT_NAME'] = script_name
+ self.method = environ['REQUEST_METHOD'].upper()
+ self._post_parse_error = False
+
+ def __repr__(self):
+ # Since this is called as part of error handling, we need to be very
+ # robust against potentially malformed input.
+ try:
+ get = pformat(self.GET)
+ except:
+ get = '<could not parse>'
+ if self._post_parse_error:
+ post = '<could not parse>'
+ else:
+ try:
+ post = pformat(self.POST)
+ except:
+ post = '<could not parse>'
+ try:
+ cookies = pformat(self.COOKIES)
+ except:
+ cookies = '<could not parse>'
+ try:
+ meta = pformat(self.META)
+ except:
+ meta = '<could not parse>'
+ return '<WSGIRequest\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % (get, post, cookies, meta)
+
+ def get_full_path(self):
+ # RFC 3986 requires query string arguments to be in the ASCII range.
+ # Rather than crash if this doesn't happen, we encode defensively.
+ return '%s%s' % (self.path, self.environ.get('QUERY_STRING', '') and ('?' + iri_to_uri(self.environ.get('QUERY_STRING', ''))) or '')
+
+ def is_secure(self):
+ return 'wsgi.url_scheme' in self.environ and self.environ['wsgi.url_scheme'] == 'https'
+
+ def _load_post_and_files(self):
+ # Populates self._post and self._files
+ if self.method == 'POST':
+ if self.environ.get('CONTENT_TYPE', '').startswith('multipart'):
+ self._raw_post_data = ''
+ try:
+ self._post, self._files = self.parse_file_upload(self.META, self.environ['wsgi.input'])
+ except:
+ # An error occurred while parsing POST data. Since when
+ # formatting the error the request handler might access
+ # self.POST, set self._post and self._file to prevent
+ # attempts to parse POST data again.
+ self._post = http.QueryDict('')
+ self._files = datastructures.MultiValueDict()
+ # Mark that an error occurred. This allows self.__repr__ to
+ # be explicit about it instead of simply representing an
+ # empty POST
+ self._post_parse_error = True
+ raise
+ else:
+ self._post, self._files = http.QueryDict(self.raw_post_data, encoding=self._encoding), datastructures.MultiValueDict()
+ else:
+ self._post, self._files = http.QueryDict('', encoding=self._encoding), datastructures.MultiValueDict()
+
+ def _get_request(self):
+ if not hasattr(self, '_request'):
+ self._request = datastructures.MergeDict(self.POST, self.GET)
+ return self._request
+
+ def _get_get(self):
+ if not hasattr(self, '_get'):
+ # The WSGI spec says 'QUERY_STRING' may be absent.
+ self._get = http.QueryDict(self.environ.get('QUERY_STRING', ''), encoding=self._encoding)
+ return self._get
+
+ def _set_get(self, get):
+ self._get = get
+
+ def _get_post(self):
+ if not hasattr(self, '_post'):
+ self._load_post_and_files()
+ return self._post
+
+ def _set_post(self, post):
+ self._post = post
+
+ def _get_cookies(self):
+ if not hasattr(self, '_cookies'):
+ self._cookies = http.parse_cookie(self.environ.get('HTTP_COOKIE', ''))
+ return self._cookies
+
+ def _set_cookies(self, cookies):
+ self._cookies = cookies
+
+ def _get_files(self):
+ if not hasattr(self, '_files'):
+ self._load_post_and_files()
+ return self._files
+
+ def _get_raw_post_data(self):
+ try:
+ return self._raw_post_data
+ except AttributeError:
+ buf = StringIO()
+ try:
+ # CONTENT_LENGTH might be absent if POST doesn't have content at all (lighttpd)
+ content_length = int(self.environ.get('CONTENT_LENGTH', 0))
+ except (ValueError, TypeError):
+ # If CONTENT_LENGTH was empty string or not an integer, don't
+ # error out. We've also seen None passed in here (against all
+ # specs, but see ticket #8259), so we handle TypeError as well.
+ content_length = 0
+ if content_length > 0:
+ safe_copyfileobj(self.environ['wsgi.input'], buf,
+ size=content_length)
+ self._raw_post_data = buf.getvalue()
+ buf.close()
+ return self._raw_post_data
+
+ GET = property(_get_get, _set_get)
+ POST = property(_get_post, _set_post)
+ COOKIES = property(_get_cookies, _set_cookies)
+ FILES = property(_get_files)
+ REQUEST = property(_get_request)
+ raw_post_data = property(_get_raw_post_data)
+
+class WSGIHandler(base.BaseHandler):
+ initLock = Lock()
+ request_class = WSGIRequest
+
+ def __call__(self, environ, start_response):
+ from google.appengine._internal.django.conf import settings
+
+ # Set up middleware if needed. We couldn't do this earlier, because
+ # settings weren't available.
+ if self._request_middleware is None:
+ self.initLock.acquire()
+ try:
+ try:
+ # Check that middleware is still uninitialised.
+ if self._request_middleware is None:
+ self.load_middleware()
+ except:
+ # Unload whatever middleware we got
+ self._request_middleware = None
+ raise
+ finally:
+ self.initLock.release()
+
+ set_script_prefix(base.get_script_name(environ))
+ signals.request_started.send(sender=self.__class__)
+ try:
+ try:
+ request = self.request_class(environ)
+ except UnicodeDecodeError:
+ response = http.HttpResponseBadRequest()
+ else:
+ response = self.get_response(request)
+
+ # Apply response middleware
+ for middleware_method in self._response_middleware:
+ response = middleware_method(request, response)
+ response = self.apply_response_fixes(request, response)
+ finally:
+ signals.request_finished.send(sender=self.__class__)
+
+ try:
+ status_text = STATUS_CODE_TEXT[response.status_code]
+ except KeyError:
+ status_text = 'UNKNOWN STATUS CODE'
+ status = '%s %s' % (response.status_code, status_text)
+ response_headers = [(str(k), str(v)) for k, v in response.items()]
+ for c in response.cookies.values():
+ response_headers.append(('Set-Cookie', str(c.output(header=''))))
+ start_response(status, response_headers)
+ return response
+
diff --git a/google/appengine/_internal/django/core/mail/__init__.py b/google/appengine/_internal/django/core/mail/__init__.py
new file mode 100644
index 0000000..aa8932c
--- /dev/null
+++ b/google/appengine/_internal/django/core/mail/__init__.py
@@ -0,0 +1,107 @@
+"""
+Tools for sending email.
+"""
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.exceptions import ImproperlyConfigured
+from google.appengine._internal.django.utils.importlib import import_module
+
+# Imported for backwards compatibility, and for the sake
+# of a cleaner namespace. These symbols used to be in
+# django/core/mail.py before the introduction of email
+# backends and the subsequent reorganization (See #10355)
+from google.appengine._internal.django.core.mail.utils import CachedDnsName, DNS_NAME
+from google.appengine._internal.django.core.mail.message import EmailMessage, EmailMultiAlternatives, SafeMIMEText, SafeMIMEMultipart, DEFAULT_ATTACHMENT_MIME_TYPE, make_msgid, BadHeaderError, forbid_multi_line_headers
+from google.appengine._internal.django.core.mail.backends.smtp import EmailBackend as _SMTPConnection
+
+def get_connection(backend=None, fail_silently=False, **kwds):
+ """Load an e-mail backend and return an instance of it.
+
+ If backend is None (default) settings.EMAIL_BACKEND is used.
+
+ Both fail_silently and other keyword arguments are used in the
+ constructor of the backend.
+ """
+ path = backend or settings.EMAIL_BACKEND
+ try:
+ mod_name, klass_name = path.rsplit('.', 1)
+ mod = import_module(mod_name)
+ except ImportError, e:
+ raise ImproperlyConfigured(('Error importing email backend module %s: "%s"'
+ % (mod_name, e)))
+ try:
+ klass = getattr(mod, klass_name)
+ except AttributeError:
+ raise ImproperlyConfigured(('Module "%s" does not define a '
+ '"%s" class' % (mod_name, klass_name)))
+ return klass(fail_silently=fail_silently, **kwds)
+
+
+def send_mail(subject, message, from_email, recipient_list,
+ fail_silently=False, auth_user=None, auth_password=None,
+ connection=None):
+ """
+ Easy wrapper for sending a single message to a recipient list. All members
+ of the recipient list will see the other recipients in the 'To' field.
+
+ If auth_user is None, the EMAIL_HOST_USER setting is used.
+ If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
+
+ Note: The API for this method is frozen. New code wanting to extend the
+ functionality should use the EmailMessage class directly.
+ """
+ connection = connection or get_connection(username=auth_user,
+ password=auth_password,
+ fail_silently=fail_silently)
+ return EmailMessage(subject, message, from_email, recipient_list,
+ connection=connection).send()
+
+
+def send_mass_mail(datatuple, fail_silently=False, auth_user=None,
+ auth_password=None, connection=None):
+ """
+ Given a datatuple of (subject, message, from_email, recipient_list), sends
+ each message to each recipient list. Returns the number of e-mails sent.
+
+ If from_email is None, the DEFAULT_FROM_EMAIL setting is used.
+ If auth_user and auth_password are set, they're used to log in.
+ If auth_user is None, the EMAIL_HOST_USER setting is used.
+ If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
+
+ Note: The API for this method is frozen. New code wanting to extend the
+ functionality should use the EmailMessage class directly.
+ """
+ connection = connection or get_connection(username=auth_user,
+ password=auth_password,
+ fail_silently=fail_silently)
+ messages = [EmailMessage(subject, message, sender, recipient)
+ for subject, message, sender, recipient in datatuple]
+ return connection.send_messages(messages)
+
+
+def mail_admins(subject, message, fail_silently=False, connection=None):
+ """Sends a message to the admins, as defined by the ADMINS setting."""
+ if not settings.ADMINS:
+ return
+ EmailMessage(u'%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject), message,
+ settings.SERVER_EMAIL, [a[1] for a in settings.ADMINS],
+ connection=connection).send(fail_silently=fail_silently)
+
+
+def mail_managers(subject, message, fail_silently=False, connection=None):
+ """Sends a message to the managers, as defined by the MANAGERS setting."""
+ if not settings.MANAGERS:
+ return
+ EmailMessage(u'%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject), message,
+ settings.SERVER_EMAIL, [a[1] for a in settings.MANAGERS],
+ connection=connection).send(fail_silently=fail_silently)
+
+
+class SMTPConnection(_SMTPConnection):
+ def __init__(self, *args, **kwds):
+ import warnings
+ warnings.warn(
+ 'mail.SMTPConnection is deprecated; use mail.get_connection() instead.',
+ PendingDeprecationWarning
+ )
+ super(SMTPConnection, self).__init__(*args, **kwds)
diff --git a/google/appengine/_internal/django/core/mail/backends/__init__.py b/google/appengine/_internal/django/core/mail/backends/__init__.py
new file mode 100644
index 0000000..5973b49
--- /dev/null
+++ b/google/appengine/_internal/django/core/mail/backends/__init__.py
@@ -0,0 +1 @@
+# Mail backends shipped with Django.
diff --git a/google/appengine/_internal/django/core/mail/backends/base.py b/google/appengine/_internal/django/core/mail/backends/base.py
new file mode 100644
index 0000000..9a30928
--- /dev/null
+++ b/google/appengine/_internal/django/core/mail/backends/base.py
@@ -0,0 +1,39 @@
+"""Base email backend class."""
+
class BaseEmailBackend(object):
    """Abstract base class for email backend implementations.

    Concrete backends must at least override send_messages().
    """

    def __init__(self, fail_silently=False, **kwargs):
        # When fail_silently is true, backends are expected to swallow
        # errors raised while connecting or sending.
        self.fail_silently = fail_silently

    def open(self):
        """Open a network connection.

        Backends that maintain a network connection override this. It may
        also be called by applications that want to reuse one connection
        for several messages; see the SMTP backend's send_messages() for
        a reference implementation. Tracking the connection's state is the
        backend's own responsibility.

        The default implementation is a no-op.
        """
        pass

    def close(self):
        """Close a network connection. The default implementation is a no-op."""
        pass

    def send_messages(self, email_messages):
        """Send one or more EmailMessage objects; return the number sent."""
        raise NotImplementedError
diff --git a/google/appengine/_internal/django/core/mail/backends/console.py b/google/appengine/_internal/django/core/mail/backends/console.py
new file mode 100644
index 0000000..cda9ce2
--- /dev/null
+++ b/google/appengine/_internal/django/core/mail/backends/console.py
@@ -0,0 +1,37 @@
+"""
+Email backend that writes messages to console instead of sending them.
+"""
+import sys
+import threading
+
+from google.appengine._internal.django.core.mail.backends.base import BaseEmailBackend
+
class EmailBackend(BaseEmailBackend):
    """Email backend that writes messages to a stream (stdout by default)."""

    def __init__(self, *args, **kwargs):
        # Allow callers (e.g. the file-based backend) to supply their own
        # stream; default to standard output.
        self.stream = kwargs.pop('stream', sys.stdout)
        self._lock = threading.RLock()
        super(EmailBackend, self).__init__(*args, **kwargs)

    def send_messages(self, email_messages):
        """Write all messages to the stream in a thread-safe way.

        Returns the number of messages written, or None when there was
        nothing to send.
        """
        if not email_messages:
            return
        self._lock.acquire()
        try:
            # The try-except is nested to allow for
            # Python 2.4 support (Refs #12147)
            try:
                stream_created = self.open()
                for message in email_messages:
                    self.stream.write('%s\n' % message.message().as_string())
                    self.stream.write('-'*79)
                    self.stream.write('\n')
                    self.stream.flush()  # flush after each message
                if stream_created:
                    self.close()
            except Exception:
                # Was a bare "except:", which (with fail_silently set) also
                # swallowed KeyboardInterrupt and SystemExit; only swallow
                # genuine errors.
                if not self.fail_silently:
                    raise
        finally:
            self._lock.release()
        return len(email_messages)
diff --git a/google/appengine/_internal/django/core/mail/backends/dummy.py b/google/appengine/_internal/django/core/mail/backends/dummy.py
new file mode 100644
index 0000000..111d79e
--- /dev/null
+++ b/google/appengine/_internal/django/core/mail/backends/dummy.py
@@ -0,0 +1,9 @@
+"""
+Dummy email backend that does nothing.
+"""
+
+from google.appengine._internal.django.core.mail.backends.base import BaseEmailBackend
+
class EmailBackend(BaseEmailBackend):
    """Email backend that silently discards every message."""
    def send_messages(self, email_messages):
        # Claim success for every message without doing any work.
        return len(email_messages)
diff --git a/google/appengine/_internal/django/core/mail/backends/filebased.py b/google/appengine/_internal/django/core/mail/backends/filebased.py
new file mode 100644
index 0000000..86f0b14
--- /dev/null
+++ b/google/appengine/_internal/django/core/mail/backends/filebased.py
@@ -0,0 +1,59 @@
+"""Email backend that writes messages to a file."""
+
+import datetime
+import os
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.exceptions import ImproperlyConfigured
+from google.appengine._internal.django.core.mail.backends.console import EmailBackend as ConsoleEmailBackend
+
class EmailBackend(ConsoleEmailBackend):
    """Console-backend subclass that appends messages to a log file.

    The target directory comes from the 'file_path' keyword argument or,
    failing that, the EMAIL_FILE_PATH setting. Each backend instance writes
    to its own file (timestamp- and id()-based name).
    """

    def __init__(self, *args, **kwargs):
        # Lazily-computed log file name; see _get_filename().
        self._fname = None
        if 'file_path' in kwargs:
            self.file_path = kwargs.pop('file_path')
        else:
            self.file_path = getattr(settings, 'EMAIL_FILE_PATH',None)
        # Make sure self.file_path is a string.
        if not isinstance(self.file_path, basestring):
            raise ImproperlyConfigured('Path for saving emails is invalid: %r' % self.file_path)
        self.file_path = os.path.abspath(self.file_path)
        # Make sure that self.file_path is a directory if it exists.
        if os.path.exists(self.file_path) and not os.path.isdir(self.file_path):
            raise ImproperlyConfigured('Path for saving email messages exists, but is not a directory: %s' % self.file_path)
        # Try to create it if it does not exist.
        # NOTE(review): the exists/makedirs pair is racy if two processes
        # start concurrently - confirm whether that matters here.
        elif not os.path.exists(self.file_path):
            try:
                os.makedirs(self.file_path)
            except OSError, err:
                raise ImproperlyConfigured('Could not create directory for saving email messages: %s (%s)' % (self.file_path, err))
        # Make sure that self.file_path is writable.
        if not os.access(self.file_path, os.W_OK):
            raise ImproperlyConfigured('Could not write to directory: %s' % self.file_path)
        # Finally, call super().
        # Since we're using the console-based backend as a base,
        # force the stream to be None, so we don't default to stdout
        kwargs['stream'] = None
        super(EmailBackend, self).__init__(*args, **kwargs)

    def _get_filename(self):
        """Return a unique file name, computed once per instance then cached."""
        if self._fname is None:
            timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
            # abs(id(self)) keeps names unique across instances created
            # within the same second.
            fname = "%s-%s.log" % (timestamp, abs(id(self)))
            self._fname = os.path.join(self.file_path, fname)
        return self._fname

    def open(self):
        """Open the log file for appending.

        Returns True when this call created the stream, False when it was
        already open - mirroring the console backend's protocol so
        send_messages() only closes streams it opened.
        """
        if self.stream is None:
            self.stream = open(self._get_filename(), 'a')
            return True
        return False

    def close(self):
        """Close the log file (if open) and always reset the stream to None."""
        try:
            if self.stream is not None:
                self.stream.close()
        finally:
            self.stream = None
+
diff --git a/google/appengine/_internal/django/core/mail/backends/locmem.py b/google/appengine/_internal/django/core/mail/backends/locmem.py
new file mode 100644
index 0000000..45ca1a8
--- /dev/null
+++ b/google/appengine/_internal/django/core/mail/backends/locmem.py
@@ -0,0 +1,24 @@
+"""
+Backend for test environment.
+"""
+
+from google.appengine._internal.django.core import mail
+from google.appengine._internal.django.core.mail.backends.base import BaseEmailBackend
+
class EmailBackend(BaseEmailBackend):
    """An email backend for use during test sessions.

    Rather than sending messages out on the wire, this backend appends them
    to a dummy outbox (the module-level mail.outbox list), where tests can
    inspect them.
    """

    def __init__(self, *args, **kwargs):
        super(EmailBackend, self).__init__(*args, **kwargs)
        # Create the shared outbox on first use; reuse it afterwards.
        mail.outbox = getattr(mail, 'outbox', [])

    def send_messages(self, messages):
        """Redirect messages to the dummy outbox"""
        mail.outbox.extend(messages)
        return len(messages)
diff --git a/google/appengine/_internal/django/core/mail/backends/smtp.py b/google/appengine/_internal/django/core/mail/backends/smtp.py
new file mode 100644
index 0000000..4b55786
--- /dev/null
+++ b/google/appengine/_internal/django/core/mail/backends/smtp.py
@@ -0,0 +1,109 @@
+"""SMTP email backend class."""
+import smtplib
+import socket
+import threading
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.mail.backends.base import BaseEmailBackend
+from google.appengine._internal.django.core.mail.utils import DNS_NAME
+from google.appengine._internal.django.core.mail.message import sanitize_address
+
+
class EmailBackend(BaseEmailBackend):
    """
    A wrapper that manages the SMTP network connection.

    Connection parameters default to the EMAIL_* settings. An RLock makes
    send_messages() safe to call from multiple threads sharing one backend.
    """
    def __init__(self, host=None, port=None, username=None, password=None,
                 use_tls=None, fail_silently=False, **kwargs):
        super(EmailBackend, self).__init__(fail_silently=fail_silently)
        self.host = host or settings.EMAIL_HOST
        self.port = port or settings.EMAIL_PORT
        self.username = username or settings.EMAIL_HOST_USER
        self.password = password or settings.EMAIL_HOST_PASSWORD
        # use_tls may legitimately be False, so only fall back to the
        # setting when it was not given at all.
        if use_tls is None:
            self.use_tls = settings.EMAIL_USE_TLS
        else:
            self.use_tls = use_tls
        # The live smtplib.SMTP connection, or None when closed.
        self.connection = None
        self._lock = threading.RLock()

    def open(self):
        """
        Ensures we have a connection to the email server. Returns whether or
        not a new connection was required (True or False).

        With fail_silently set, connection errors are swallowed and None is
        returned (self.connection stays None).
        """
        if self.connection:
            # Nothing to do if the connection is already open.
            return False
        try:
            # If local_hostname is not specified, socket.getfqdn() gets used.
            # For performance, we use the cached FQDN for local_hostname.
            self.connection = smtplib.SMTP(self.host, self.port,
                                           local_hostname=DNS_NAME.get_fqdn())
            if self.use_tls:
                # EHLO is required both before STARTTLS and again on the
                # newly-encrypted channel.
                self.connection.ehlo()
                self.connection.starttls()
                self.connection.ehlo()
            if self.username and self.password:
                self.connection.login(self.username, self.password)
            return True
        except:
            if not self.fail_silently:
                raise

    def close(self):
        """Closes the connection to the email server."""
        try:
            try:
                self.connection.quit()
            except socket.sslerror:
                # This happens when calling quit() on a TLS connection
                # sometimes.
                self.connection.close()
            except:
                if self.fail_silently:
                    return
                raise
        finally:
            # Always forget the connection so a later open() reconnects.
            self.connection = None

    def send_messages(self, email_messages):
        """
        Sends one or more EmailMessage objects and returns the number of email
        messages sent (None when the list is empty or opening failed silently).
        """
        if not email_messages:
            return
        self._lock.acquire()
        try:
            new_conn_created = self.open()
            if not self.connection:
                # We failed silently on open().
                # Trying to send would be pointless.
                return
            num_sent = 0
            for message in email_messages:
                sent = self._send(message)
                if sent:
                    num_sent += 1
            if new_conn_created:
                # Only tear down a connection this call itself established.
                self.close()
        finally:
            self._lock.release()
        return num_sent

    def _send(self, email_message):
        """A helper method that does the actual sending.

        Returns True on success; False when the message has no recipients
        or (with fail_silently) when sending failed.
        """
        if not email_message.recipients():
            return False
        from_email = sanitize_address(email_message.from_email, email_message.encoding)
        recipients = [sanitize_address(addr, email_message.encoding)
                      for addr in email_message.recipients()]
        try:
            self.connection.sendmail(from_email, recipients,
                                     email_message.message().as_string())
        except:
            if not self.fail_silently:
                raise
            return False
        return True
diff --git a/google/appengine/_internal/django/core/mail/message.py b/google/appengine/_internal/django/core/mail/message.py
new file mode 100644
index 0000000..c04e08d
--- /dev/null
+++ b/google/appengine/_internal/django/core/mail/message.py
@@ -0,0 +1,320 @@
+import mimetypes
+import os
+import random
+import time
+from email import Charset, Encoders
+from email.MIMEText import MIMEText
+from email.MIMEMultipart import MIMEMultipart
+from email.MIMEBase import MIMEBase
+from email.Header import Header
+from email.Utils import formatdate, getaddresses, formataddr
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.mail.utils import DNS_NAME
+from google.appengine._internal.django.utils.encoding import smart_str, force_unicode
+from email.Utils import parseaddr
+
# Don't BASE64-encode UTF-8 messages so that we avoid unwanted attention from
# some spam filters: prefer the shortest of quoted-printable/none instead.
Charset.add_charset('utf-8', Charset.SHORTEST, Charset.QP, 'utf-8')

# Default MIME type to use on attachments (if it is not explicitly given
# and cannot be guessed from the attachment's filename).
DEFAULT_ATTACHMENT_MIME_TYPE = 'application/octet-stream'
+
+
class BadHeaderError(ValueError):
    """Raised when a header value contains newlines (header injection attempt)."""
    pass
+
+
# Copied from Python standard library, with the following modifications:
# * Used cached hostname for performance.
# * Added try/except to support lack of getpid() in Jython (#5496).
def make_msgid(idstring=None):
    """Returns a string suitable for RFC 2822 compliant Message-ID, e.g:

    <20020201195627.33539.96671@nightshade.la.mastaler.com>

    Optional idstring if given is a string used to strengthen the
    uniqueness of the message id.
    """
    utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(time.time()))
    try:
        pid = os.getpid()
    except AttributeError:
        # No getpid() in Jython, for example.
        pid = 1
    unique = random.randrange(100000)
    if idstring is None:
        suffix = ''
    else:
        suffix = '.' + idstring
    # DNS_NAME lazily resolves (and caches) the local FQDN via %s/str().
    return '<%s.%s.%s%s@%s>' % (utcdate, pid, unique, suffix, DNS_NAME)
+
+
# Header names that contain structured address data (RFC #5322). These must
# be sanitized address-by-address (see forbid_multi_line_headers) rather
# than RFC 2047-encoded wholesale.
ADDRESS_HEADERS = set([
    'from',
    'sender',
    'reply-to',
    'to',
    'cc',
    'bcc',
    'resent-from',
    'resent-sender',
    'resent-to',
    'resent-cc',
    'resent-bcc',
])
+
+
def forbid_multi_line_headers(name, val, encoding):
    """Forbids multi-line headers, to prevent header injection.

    Returns a (name, val) pair with val encoded for safe use as the value
    of header 'name'. Raises BadHeaderError if val contains a newline.
    """
    encoding = encoding or settings.DEFAULT_CHARSET
    val = force_unicode(val)
    if '\n' in val or '\r' in val:
        raise BadHeaderError("Header values can't contain newlines (got %r for header %r)" % (val, name))
    try:
        val = val.encode('ascii')
    except UnicodeEncodeError:
        # Non-ASCII value: address headers must be sanitized one address at
        # a time (only the display name may be encoded); anything else can
        # be RFC 2047-encoded wholesale via Header.
        if name.lower() in ADDRESS_HEADERS:
            val = ', '.join(sanitize_address(addr, encoding)
                            for addr in getaddresses((val,)))
        else:
            val = str(Header(val, encoding))
    else:
        if name.lower() == 'subject':
            # Plain-ASCII subject: Header takes care of folding long lines.
            val = Header(val)
    return name, val
+
+
def sanitize_address(addr, encoding):
    """Returns a formatted address string safe for use in a header.

    Accepts either an address string or a (name, address) pair. Non-ASCII
    display names are encoded via Header; non-ASCII domains are IDNA-encoded;
    a non-ASCII local part falls back to Header encoding.
    """
    if isinstance(addr, basestring):
        addr = parseaddr(force_unicode(addr))
    nm, addr = addr
    # Display name may be non-ASCII; Header encodes it when necessary.
    nm = str(Header(nm, encoding))
    try:
        addr = addr.encode('ascii')
    except UnicodeEncodeError: # IDN
        if u'@' in addr:
            localpart, domain = addr.split(u'@', 1)
            localpart = str(Header(localpart, encoding))
            domain = domain.encode('idna')
            addr = '@'.join([localpart, domain])
        else:
            addr = str(Header(addr, encoding))
    return formataddr((nm, addr))
+
+
class SafeMIMEText(MIMEText):
    """MIMEText subclass whose header assignments are injection-checked."""

    def __init__(self, text, subtype, charset):
        # Remember the charset so __setitem__ can encode header values.
        self.encoding = charset
        MIMEText.__init__(self, text, subtype, charset)

    def __setitem__(self, name, val):
        # Validate/encode every header (see forbid_multi_line_headers).
        name, val = forbid_multi_line_headers(name, val, self.encoding)
        MIMEText.__setitem__(self, name, val)
+
+
class SafeMIMEMultipart(MIMEMultipart):
    """MIMEMultipart subclass whose header assignments are injection-checked."""

    def __init__(self, _subtype='mixed', boundary=None, _subparts=None, encoding=None, **_params):
        # Remember the charset so __setitem__ can encode header values.
        self.encoding = encoding
        MIMEMultipart.__init__(self, _subtype, boundary, _subparts, **_params)

    def __setitem__(self, name, val):
        # Validate/encode every header (see forbid_multi_line_headers).
        name, val = forbid_multi_line_headers(name, val, self.encoding)
        MIMEMultipart.__setitem__(self, name, val)
+
+
+class EmailMessage(object):
+ """
+ A container for email information.
+ """
+ content_subtype = 'plain'
+ mixed_subtype = 'mixed'
+ encoding = None # None => use settings default
+
+ def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
+ connection=None, attachments=None, headers=None):
+ """
+ Initialize a single email message (which can be sent to multiple
+ recipients).
+
+ All strings used to create the message can be unicode strings
+ (or UTF-8 bytestrings). The SafeMIMEText class will handle any
+ necessary encoding conversions.
+ """
+ if to:
+ assert not isinstance(to, basestring), '"to" argument must be a list or tuple'
+ self.to = list(to)
+ else:
+ self.to = []
+ if bcc:
+ assert not isinstance(bcc, basestring), '"bcc" argument must be a list or tuple'
+ self.bcc = list(bcc)
+ else:
+ self.bcc = []
+ self.from_email = from_email or settings.DEFAULT_FROM_EMAIL
+ self.subject = subject
+ self.body = body
+ self.attachments = attachments or []
+ self.extra_headers = headers or {}
+ self.connection = connection
+
+ def get_connection(self, fail_silently=False):
+ from google.appengine._internal.django.core.mail import get_connection
+ if not self.connection:
+ self.connection = get_connection(fail_silently=fail_silently)
+ return self.connection
+
+ def message(self):
+ encoding = self.encoding or settings.DEFAULT_CHARSET
+ msg = SafeMIMEText(smart_str(self.body, encoding),
+ self.content_subtype, encoding)
+ msg = self._create_message(msg)
+ msg['Subject'] = self.subject
+ msg['From'] = self.extra_headers.get('From', self.from_email)
+ msg['To'] = ', '.join(self.to)
+
+ # Email header names are case-insensitive (RFC 2045), so we have to
+ # accommodate that when doing comparisons.
+ header_names = [key.lower() for key in self.extra_headers]
+ if 'date' not in header_names:
+ msg['Date'] = formatdate()
+ if 'message-id' not in header_names:
+ msg['Message-ID'] = make_msgid()
+ for name, value in self.extra_headers.items():
+ if name.lower() == 'from': # From is already handled
+ continue
+ msg[name] = value
+ return msg
+
+ def recipients(self):
+ """
+ Returns a list of all recipients of the email (includes direct
+ addressees as well as Bcc entries).
+ """
+ return self.to + self.bcc
+
+ def send(self, fail_silently=False):
+ """Sends the email message."""
+ if not self.recipients():
+ # Don't bother creating the network connection if there's nobody to
+ # send to.
+ return 0
+ return self.get_connection(fail_silently).send_messages([self])
+
+ def attach(self, filename=None, content=None, mimetype=None):
+ """
+ Attaches a file with the given filename and content. The filename can
+ be omitted and the mimetype is guessed, if not provided.
+
+ If the first parameter is a MIMEBase subclass it is inserted directly
+ into the resulting message attachments.
+ """
+ if isinstance(filename, MIMEBase):
+ assert content == mimetype == None
+ self.attachments.append(filename)
+ else:
+ assert content is not None
+ self.attachments.append((filename, content, mimetype))
+
+ def attach_file(self, path, mimetype=None):
+ """Attaches a file from the filesystem."""
+ filename = os.path.basename(path)
+ content = open(path, 'rb').read()
+ self.attach(filename, content, mimetype)
+
+ def _create_message(self, msg):
+ return self._create_attachments(msg)
+
+ def _create_attachments(self, msg):
+ if self.attachments:
+ encoding = self.encoding or settings.DEFAULT_CHARSET
+ body_msg = msg
+ msg = SafeMIMEMultipart(_subtype=self.mixed_subtype, encoding=encoding)
+ if self.body:
+ msg.attach(body_msg)
+ for attachment in self.attachments:
+ if isinstance(attachment, MIMEBase):
+ msg.attach(attachment)
+ else:
+ msg.attach(self._create_attachment(*attachment))
+ return msg
+
+ def _create_mime_attachment(self, content, mimetype):
+ """
+ Converts the content, mimetype pair into a MIME attachment object.
+ """
+ basetype, subtype = mimetype.split('/', 1)
+ if basetype == 'text':
+ encoding = self.encoding or settings.DEFAULT_CHARSET
+ attachment = SafeMIMEText(smart_str(content, encoding), subtype, encoding)
+ else:
+ # Encode non-text attachments with base64.
+ attachment = MIMEBase(basetype, subtype)
+ attachment.set_payload(content)
+ Encoders.encode_base64(attachment)
+ return attachment
+
+ def _create_attachment(self, filename, content, mimetype=None):
+ """
+ Converts the filename, content, mimetype triple into a MIME attachment
+ object.
+ """
+ if mimetype is None:
+ mimetype, _ = mimetypes.guess_type(filename)
+ if mimetype is None:
+ mimetype = DEFAULT_ATTACHMENT_MIME_TYPE
+ attachment = self._create_mime_attachment(content, mimetype)
+ if filename:
+ attachment.add_header('Content-Disposition', 'attachment',
+ filename=filename)
+ return attachment
+
+
class EmailMultiAlternatives(EmailMessage):
    """
    A version of EmailMessage that makes it easy to send multipart/alternative
    messages. For example, including text and HTML versions of the text is
    made easier.
    """
    alternative_subtype = 'alternative'

    def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
                 connection=None, attachments=None, headers=None, alternatives=None):
        """
        Initialize a single email message (which can be sent to multiple
        recipients).

        All strings used to create the message can be unicode strings (or UTF-8
        bytestrings). The SafeMIMEText class will handle any necessary encoding
        conversions.
        """
        super(EmailMultiAlternatives, self).__init__(subject, body, from_email,
                                                     to, bcc, connection,
                                                     attachments, headers)
        if alternatives:
            self.alternatives = alternatives
        else:
            self.alternatives = []

    def attach_alternative(self, content, mimetype):
        """Attach an alternative content representation."""
        assert content is not None
        assert mimetype is not None
        self.alternatives.append((content, mimetype))

    def _create_message(self, msg):
        # Alternatives are wrapped first; attachments wrap the result.
        return self._create_attachments(self._create_alternatives(msg))

    def _create_alternatives(self, msg):
        """Wraps msg in a multipart/alternative container when needed."""
        encoding = self.encoding or settings.DEFAULT_CHARSET
        if self.alternatives:
            body_part = msg
            msg = SafeMIMEMultipart(_subtype=self.alternative_subtype,
                                    encoding=encoding)
            if self.body:
                msg.attach(body_part)
            for content, mimetype in self.alternatives:
                msg.attach(self._create_mime_attachment(content, mimetype))
        return msg
diff --git a/google/appengine/_internal/django/core/mail/utils.py b/google/appengine/_internal/django/core/mail/utils.py
new file mode 100644
index 0000000..322a3a1
--- /dev/null
+++ b/google/appengine/_internal/django/core/mail/utils.py
@@ -0,0 +1,19 @@
+"""
+Email message and email sending related helper functions.
+"""
+
+import socket
+
+
# Cache the hostname, but do it lazily: socket.getfqdn() can take a couple of
# seconds, which slows down the restart of the server.
class CachedDnsName(object):
    """Lazily computes and caches the local fully-qualified domain name."""

    def __str__(self):
        return self.get_fqdn()

    def get_fqdn(self):
        # EAFP: the first call raises AttributeError and fills the cache;
        # every later call returns the cached value directly.
        try:
            return self._fqdn
        except AttributeError:
            self._fqdn = socket.getfqdn()
            return self._fqdn

DNS_NAME = CachedDnsName()
diff --git a/google/appengine/_internal/django/core/management/__init__.py b/google/appengine/_internal/django/core/management/__init__.py
new file mode 100644
index 0000000..c719ff5
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/__init__.py
@@ -0,0 +1,437 @@
+import os
+import sys
+from optparse import OptionParser, NO_DEFAULT
+import imp
+
+import django
+from google.appengine._internal.django.core.management.base import BaseCommand, CommandError, handle_default_options
+from google.appengine._internal.django.utils.importlib import import_module
+
# For backwards compatibility: get_version() used to be in this module.
get_version = django.get_version

# A cache of loaded commands, so that call_command
# doesn't have to reload every time it's called.
# Populated lazily by get_commands(); None means "not built yet".
_commands = None
+
def find_commands(management_dir):
    """
    Given a path to a management directory, returns a list of all the command
    names that are available.

    Returns an empty list if no commands are defined.
    """
    command_dir = os.path.join(management_dir, 'commands')
    try:
        filenames = os.listdir(command_dir)
    except OSError:
        # Missing/unreadable commands directory: no commands here.
        return []
    return [f[:-3] for f in filenames
            if f.endswith('.py') and not f.startswith('_')]
+
def find_management_module(app_name):
    """
    Determines the path to the management module for the given app_name,
    without actually importing the application or the management module.

    Raises ImportError if the management module cannot be found for any reason.
    """
    # Walk the dotted path piece by piece; reversed so pop() yields the
    # components in original order, ending with 'management'.
    parts = app_name.split('.')
    parts.append('management')
    parts.reverse()
    part = parts.pop()
    path = None

    # When using manage.py, the project module is added to the path,
    # loaded, then removed from the path. This means that
    # testproject.testapp.models can be loaded in future, even if
    # testproject isn't in the path. When looking for the management
    # module, we need look for the case where the project name is part
    # of the app_name but the project directory itself isn't on the path.
    try:
        f, path, descr = imp.find_module(part,path)
    except ImportError,e:
        # If the cwd is the project directory itself, continue the search
        # relative to it (path stays None); otherwise re-raise.
        if os.path.basename(os.getcwd()) != part:
            raise e

    while parts:
        part = parts.pop()
        # NOTE(review): imp.find_module returns an open file handle for
        # plain modules; it is never closed here - presumably only package
        # directories (f is None) are expected on this path. TODO confirm.
        f, path, descr = imp.find_module(part, path and [path] or None)
    return path
+
def load_command_class(app_name, name):
    """
    Given a command name and an application name, returns the Command
    class instance. All errors raised by the import process
    (ImportError, AttributeError) are allowed to propagate.
    """
    module_path = '%s.management.commands.%s' % (app_name, name)
    return import_module(module_path).Command()
+
def get_commands():
    """
    Returns a dictionary mapping command names to their callback applications.

    This works by looking for a management.commands package in django.core, and
    in each installed application -- if a commands package exists, all commands
    in that package are registered.

    Core commands are always included. If a settings module has been
    specified, user-defined commands will also be included, the
    startproject command will be disabled, and the startapp command
    will be modified to use the directory in which the settings module appears.

    The dictionary is in the format {command_name: app_name}. Key-value
    pairs from this dictionary can then be used in calls to
    load_command_class(app_name, command_name)

    If a specific version of a command must be loaded (e.g., with the
    startapp command), the instantiated module can be placed in the
    dictionary in place of the application name.

    The dictionary is cached on the first call and reused on subsequent
    calls.
    """
    global _commands
    if _commands is None:
        # Seed the cache with the commands bundled in this package itself.
        _commands = dict([(name, 'django.core') for name in find_commands(__path__[0])])

        # Find the installed apps
        try:
            from google.appengine._internal.django.conf import settings
            apps = settings.INSTALLED_APPS
        except (AttributeError, EnvironmentError, ImportError):
            # No usable settings module: only core commands are available.
            apps = []

        # Find the project directory
        try:
            from google.appengine._internal.django.conf import settings
            module = import_module(settings.SETTINGS_MODULE)
            # NOTE(review): setup_environ is not defined in the code shown
            # here - presumably defined later in this module. TODO confirm.
            project_directory = setup_environ(module, settings.SETTINGS_MODULE)
        except (AttributeError, EnvironmentError, ImportError, KeyError):
            project_directory = None

        # Find and load the management module for each installed app.
        for app_name in apps:
            try:
                path = find_management_module(app_name)
                _commands.update(dict([(name, app_name)
                                       for name in find_commands(path)]))
            except ImportError:
                pass # No management module - ignore this app

        if project_directory:
            # Remove the "startproject" command from self.commands, because
            # that's a django-admin.py command, not a manage.py command.
            del _commands['startproject']

            # Override the startapp command so that it always uses the
            # project_directory, not the current working directory
            # (which is default).
            from google.appengine._internal.django.core.management.commands.startapp import ProjectCommand
            _commands['startapp'] = ProjectCommand(project_directory)

    return _commands
+
def call_command(name, *args, **options):
    """
    Calls the given command, with the given options and args/kwargs.

    This is the primary API you should use for calling specific commands.

    Some examples:
       call_command('syncdb')
       call_command('shell', plain=True)
       call_command('sqlall', 'myapp')
    """
    # Load the command object.
    try:
        app_name = get_commands()[name]
        if isinstance(app_name, BaseCommand):
            # If the command is already loaded, use it directly.
            command = app_name
        else:
            command = load_command_class(app_name, name)
    except KeyError:
        raise CommandError("Unknown command: %r" % name)

    # Grab out a list of defaults from the options. optparse does this for us
    # when the script runs from the command line, but since call_command can
    # be called programatically, we need to simulate the loading and handling
    # of defaults (see #10080 for details).
    defaults = {}
    for opt in command.option_list:
        if opt.default is not NO_DEFAULT:
            defaults[opt.dest] = opt.default
    defaults.update(options)

    return command.execute(*args, **defaults)
+
class LaxOptionParser(OptionParser):
    """
    An option parser that doesn't raise any errors on unknown options.

    This is needed because the --settings and --pythonpath options affect
    the commands (and thus the options) that are available to the user.
    """
    def error(self, msg):
        # Swallow parse errors entirely; unknown options are expected here.
        pass

    def print_help(self):
        """Output nothing.

        The lax options are included in the normal option parser, so under
        normal usage, we don't need to print the lax options.
        """
        pass

    def print_lax_help(self):
        """Output the basic options available to every command.

        This just redirects to the default print_help() behaviour.
        """
        OptionParser.print_help(self)

    def _process_args(self, largs, rargs, values):
        """
        Overrides OptionParser._process_args to exclusively handle default
        options and ignore args and other options.

        This overrides the behavior of the super class, which stop parsing
        at the first unrecognized option.
        """
        while rargs:
            arg = rargs[0]
            try:
                if arg[0:2] == "--" and len(arg) > 2:
                    # process a single long option (possibly with value(s))
                    # the superclass code pops the arg off rargs
                    self._process_long_opt(rargs, values)
                elif arg[:1] == "-" and len(arg) > 1:
                    # process a cluster of short options (possibly with
                    # value(s) for the last one only)
                    # the superclass code pops the arg off rargs
                    self._process_short_opts(rargs, values)
                else:
                    # it's either a non-default option or an arg
                    # either way, add it to the args list so we can keep
                    # dealing with options
                    del rargs[0]
                    raise Exception
            except:
                # Deliberate bare except: both optparse errors and the
                # Exception raised above land here, and the token is simply
                # kept as a positional argument.
                largs.append(arg)
+
+class ManagementUtility(object):
+ """
+ Encapsulates the logic of the django-admin.py and manage.py utilities.
+
+ A ManagementUtility has a number of commands, which can be manipulated
+ by editing the self.commands dictionary.
+ """
    def __init__(self, argv=None):
        # Default to a copy of sys.argv so later mutation of self.argv
        # cannot affect the caller's list.
        self.argv = argv or sys.argv[:]
        # Program name (e.g. "manage.py"), used in help and error messages.
        self.prog_name = os.path.basename(self.argv[0])
+
+ def main_help_text(self):
+ """
+ Returns the script's main help text, as a string.
+ """
+ usage = ['',"Type '%s help <subcommand>' for help on a specific subcommand." % self.prog_name,'']
+ usage.append('Available subcommands:')
+ commands = get_commands().keys()
+ commands.sort()
+ for cmd in commands:
+ usage.append(' %s' % cmd)
+ return '\n'.join(usage)
+
+ def fetch_command(self, subcommand):
+ """
+ Tries to fetch the given subcommand, printing a message with the
+ appropriate command called from the command line (usually
+ "django-admin.py" or "manage.py") if it can't be found.
+ """
+ try:
+ app_name = get_commands()[subcommand]
+ except KeyError:
+ sys.stderr.write("Unknown command: %r\nType '%s help' for usage.\n" % (subcommand, self.prog_name))
+ sys.exit(1)
+ if isinstance(app_name, BaseCommand):
+ # If the command is already loaded, use it directly.
+ klass = app_name
+ else:
+ klass = load_command_class(app_name, subcommand)
+ return klass
+
    def autocomplete(self):
        """
        Output completion suggestions for BASH.

        The output of this function is passed to BASH's `COMREPLY` variable and
        treated as completion suggestions. `COMREPLY` expects a space
        separated string as the result.

        The `COMP_WORDS` and `COMP_CWORD` BASH environment variables are used
        to get information about the cli input. Please refer to the BASH
        man-page for more information about this variables.

        Subcommand options are saved as pairs. A pair consists of
        the long option string (e.g. '--exclude') and a boolean
        value indicating if the option requires arguments. When printing to
        stdout, a equal sign is appended to options which require arguments.

        Note: If debugging this function, it is recommended to write the debug
        output in a separate file. Otherwise the debug output will be treated
        and formatted as potential completion suggestions.
        """
        # Don't complete if user hasn't sourced bash_completion file.
        if not os.environ.has_key('DJANGO_AUTO_COMPLETE'):
            return

        # Words on the command line (minus the program name) and the index
        # of the word currently being completed.
        cwords = os.environ['COMP_WORDS'].split()[1:]
        cword = int(os.environ['COMP_CWORD'])

        try:
            curr = cwords[cword-1]
        except IndexError:
            curr = ''

        subcommands = get_commands().keys() + ['help']
        options = [('--help', None)]

        # subcommand
        if cword == 1:
            print ' '.join(sorted(filter(lambda x: x.startswith(curr), subcommands)))
        # subcommand options
        # special case: the 'help' subcommand has no options
        elif cwords[0] in subcommands and cwords[0] != 'help':
            subcommand_cls = self.fetch_command(cwords[0])
            # special case: 'runfcgi' stores additional options as
            # 'key=value' pairs
            if cwords[0] == 'runfcgi':
                from google.appengine._internal.django.core.servers.fastcgi import FASTCGI_OPTIONS
                options += [(k, 1) for k in FASTCGI_OPTIONS]
            # special case: add the names of installed apps to options
            elif cwords[0] in ('dumpdata', 'reset', 'sql', 'sqlall',
                               'sqlclear', 'sqlcustom', 'sqlindexes',
                               'sqlreset', 'sqlsequencereset', 'test'):
                try:
                    from google.appengine._internal.django.conf import settings
                    # Get the last part of the dotted path as the app name.
                    options += [(a.split('.')[-1], 0) for a in settings.INSTALLED_APPS]
                except ImportError:
                    # Fail silently if DJANGO_SETTINGS_MODULE isn't set. The
                    # user will find out once they execute the command.
                    pass
            options += [(s_opt.get_opt_string(), s_opt.nargs) for s_opt in
                        subcommand_cls.option_list]
            # filter out previously specified options from available options
            prev_opts = [x.split('=')[0] for x in cwords[1:cword-1]]
            options = filter(lambda (x, v): x not in prev_opts, options)

        # filter options by current input
        options = sorted([(k, v) for k, v in options if k.startswith(curr)])
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1]:
                opt_label += '='
            print opt_label
        # Exit: this process exists solely to emit completion suggestions.
        sys.exit(1)
+
+ def execute(self):
+ """
+ Given the command-line arguments, this figures out which subcommand is
+ being run, creates a parser appropriate to that command, and runs it.
+ """
+ # Preprocess options to extract --settings and --pythonpath.
+ # These options could affect the commands that are available, so they
+ # must be processed early.
+ parser = LaxOptionParser(usage="%prog subcommand [options] [args]",
+ version=get_version(),
+ option_list=BaseCommand.option_list)
+ self.autocomplete()
+ try:
+ options, args = parser.parse_args(self.argv)
+ handle_default_options(options)
+ except:
+ pass # Ignore any option errors at this point.
+
+ try:
+ subcommand = self.argv[1]
+ except IndexError:
+ subcommand = 'help' # Display help if no arguments were given.
+
+ if subcommand == 'help':
+ if len(args) > 2:
+ self.fetch_command(args[2]).print_help(self.prog_name, args[2])
+ else:
+ parser.print_lax_help()
+ sys.stderr.write(self.main_help_text() + '\n')
+ sys.exit(1)
+ # Special-cases: We want 'django-admin.py --version' and
+ # 'django-admin.py --help' to work, for backwards compatibility.
+ elif self.argv[1:] == ['--version']:
+ # LaxOptionParser already takes care of printing the version.
+ pass
+ elif self.argv[1:] == ['--help']:
+ parser.print_lax_help()
+ sys.stderr.write(self.main_help_text() + '\n')
+ else:
+ self.fetch_command(subcommand).run_from_argv(self.argv)
+
+def setup_environ(settings_mod, original_settings_path=None):
+ """
+ Configures the runtime environment. This can also be used by external
+ scripts wanting to set up a similar environment to manage.py.
+ Returns the project directory (assuming the passed settings module is
+ directly in the project directory).
+
+ The "original_settings_path" parameter is optional, but recommended, since
+ trying to work out the original path from the module can be problematic.
+ """
+ # Add this project to sys.path so that it's importable in the conventional
+ # way. For example, if this file (manage.py) lives in a directory
+ # "myproject", this code would add "/path/to/myproject" to sys.path.
+ if '__init__.py' in settings_mod.__file__:
+ p = os.path.dirname(settings_mod.__file__)
+ else:
+ p = settings_mod.__file__
+ project_directory, settings_filename = os.path.split(p)
+ if project_directory == os.curdir or not project_directory:
+ project_directory = os.getcwd()
+ project_name = os.path.basename(project_directory)
+
+ # Strip filename suffix to get the module name.
+ settings_name = os.path.splitext(settings_filename)[0]
+
+ # Strip $py for Jython compiled files (like settings$py.class)
+ if settings_name.endswith("$py"):
+ settings_name = settings_name[:-3]
+
+ # Set DJANGO_SETTINGS_MODULE appropriately.
+ if original_settings_path:
+ os.environ['DJANGO_SETTINGS_MODULE'] = original_settings_path
+ else:
+ os.environ['DJANGO_SETTINGS_MODULE'] = '%s.%s' % (project_name, settings_name)
+
+ # Import the project module. We add the parent directory to PYTHONPATH to
+ # avoid some of the path errors new users can have.
+ sys.path.append(os.path.join(project_directory, os.pardir))
+ project_module = import_module(project_name)
+ sys.path.pop()
+
+ return project_directory
+
+def execute_from_command_line(argv=None):
+ """
+ A simple method that runs a ManagementUtility.
+ """
+ utility = ManagementUtility(argv)
+ utility.execute()
+
+def execute_manager(settings_mod, argv=None):
+ """
+ Like execute_from_command_line(), but for use by manage.py, a
+ project-specific django-admin.py utility.
+ """
+ setup_environ(settings_mod)
+ utility = ManagementUtility(argv)
+ utility.execute()
diff --git a/google/appengine/_internal/django/core/management/base.py b/google/appengine/_internal/django/core/management/base.py
new file mode 100644
index 0000000..42a3459
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/base.py
@@ -0,0 +1,431 @@
+"""
+Base classes for writing management commands (named commands which can
+be executed through ``django-admin.py`` or ``manage.py``).
+
+"""
+
+import os
+import sys
+from optparse import make_option, OptionParser
+
+import django
+from google.appengine._internal.django.core.exceptions import ImproperlyConfigured
+from google.appengine._internal.django.core.management.color import color_style
+from google.appengine._internal.django.utils.encoding import smart_str
+
+class CommandError(Exception):
+ """
+ Exception class indicating a problem while executing a management
+ command.
+
+ If this exception is raised during the execution of a management
+ command, it will be caught and turned into a nicely-printed error
+ message to the appropriate output stream (i.e., stderr); as a
+ result, raising this exception (with a sensible description of the
+ error) is the preferred way to indicate that something has gone
+ wrong in the execution of a command.
+
+ """
+ pass
+
+def handle_default_options(options):
+ """
+ Include any default options that all commands should accept here
+ so that ManagementUtility can handle them before searching for
+ user commands.
+
+ """
+ if options.settings:
+ os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
+ if options.pythonpath:
+ sys.path.insert(0, options.pythonpath)
+
+class BaseCommand(object):
+ """
+ The base class from which all management commands ultimately
+ derive.
+
+ Use this class if you want access to all of the mechanisms which
+ parse the command-line arguments and work out what code to call in
+ response; if you don't need to change any of that behavior,
+ consider using one of the subclasses defined in this file.
+
+ If you are interested in overriding/customizing various aspects of
+ the command-parsing and -execution behavior, the normal flow works
+ as follows:
+
+ 1. ``django-admin.py`` or ``manage.py`` loads the command class
+ and calls its ``run_from_argv()`` method.
+
+ 2. The ``run_from_argv()`` method calls ``create_parser()`` to get
+ an ``OptionParser`` for the arguments, parses them, performs
+ any environment changes requested by options like
+ ``pythonpath``, and then calls the ``execute()`` method,
+ passing the parsed arguments.
+
+ 3. The ``execute()`` method attempts to carry out the command by
+ calling the ``handle()`` method with the parsed arguments; any
+ output produced by ``handle()`` will be printed to standard
+ output and, if the command is intended to produce a block of
+ SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.
+
+ 4. If ``handle()`` raised a ``CommandError``, ``execute()`` will
+ instead print an error message to ``stderr``.
+
+ Thus, the ``handle()`` method is typically the starting point for
+ subclasses; many built-in commands and command types either place
+ all of their logic in ``handle()``, or perform some additional
+ parsing work in ``handle()`` and then delegate from it to more
+ specialized methods as needed.
+
+ Several attributes affect behavior at various steps along the way:
+
+ ``args``
+ A string listing the arguments accepted by the command,
+ suitable for use in help messages; e.g., a command which takes
+ a list of application names might set this to '<appname
+ appname ...>'.
+
+ ``can_import_settings``
+ A boolean indicating whether the command needs to be able to
+ import Django settings; if ``True``, ``execute()`` will verify
+ that this is possible before proceeding. Default value is
+ ``True``.
+
+ ``help``
+ A short description of the command, which will be printed in
+ help messages.
+
+ ``option_list``
+ This is the list of ``optparse`` options which will be fed
+ into the command's ``OptionParser`` for parsing arguments.
+
+ ``output_transaction``
+ A boolean indicating whether the command outputs SQL
+ statements; if ``True``, the output will automatically be
+ wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
+ ``False``.
+
+ ``requires_model_validation``
+ A boolean; if ``True``, validation of installed models will be
+ performed prior to executing the command. Default value is
+ ``True``. To validate an individual application's models
+ rather than all applications' models, call
+ ``self.validate(app)`` from ``handle()``, where ``app`` is the
+ application's Python module.
+
+ """
+ # Metadata about this command.
+ option_list = (
+ make_option('-v', '--verbosity', action='store', dest='verbosity', default='1',
+ type='choice', choices=['0', '1', '2'],
+ help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
+ make_option('--settings',
+ help='The Python path to a settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.'),
+ make_option('--pythonpath',
+ help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".'),
+ make_option('--traceback', action='store_true',
+ help='Print traceback on exception'),
+ )
+ help = ''
+ args = ''
+
+ # Configuration shortcuts that alter various logic.
+ can_import_settings = True
+ requires_model_validation = True
+ output_transaction = False # Whether to wrap the output in a "BEGIN; COMMIT;"
+
+ def __init__(self):
+ self.style = color_style()
+
+ def get_version(self):
+ """
+ Return the Django version, which should be correct for all
+ built-in Django commands. User-supplied commands should
+ override this method.
+
+ """
+ return django.get_version()
+
+ def usage(self, subcommand):
+ """
+ Return a brief description of how to use this command, by
+ default from the attribute ``self.help``.
+
+ """
+ usage = '%%prog %s [options] %s' % (subcommand, self.args)
+ if self.help:
+ return '%s\n\n%s' % (usage, self.help)
+ else:
+ return usage
+
+ def create_parser(self, prog_name, subcommand):
+ """
+ Create and return the ``OptionParser`` which will be used to
+ parse the arguments to this command.
+
+ """
+ return OptionParser(prog=prog_name,
+ usage=self.usage(subcommand),
+ version=self.get_version(),
+ option_list=self.option_list)
+
+ def print_help(self, prog_name, subcommand):
+ """
+ Print the help message for this command, derived from
+ ``self.usage()``.
+
+ """
+ parser = self.create_parser(prog_name, subcommand)
+ parser.print_help()
+
+ def run_from_argv(self, argv):
+ """
+ Set up any environment changes requested (e.g., Python path
+ and Django settings), then run this command.
+
+ """
+ parser = self.create_parser(argv[0], argv[1])
+ options, args = parser.parse_args(argv[2:])
+ handle_default_options(options)
+ self.execute(*args, **options.__dict__)
+
+ def execute(self, *args, **options):
+ """
+ Try to execute this command, performing model validation if
+ needed (as controlled by the attribute
+ ``self.requires_model_validation``). If the command raises a
+ ``CommandError``, intercept it and print it sensibly to
+ stderr.
+
+ """
+ # Switch to English, because django-admin.py creates database content
+ # like permissions, and those shouldn't contain any translations.
+ # But only do this if we can assume we have a working settings file,
+ # because django.utils.translation requires settings.
+ if self.can_import_settings:
+ try:
+ from google.appengine._internal.django.utils import translation
+ translation.activate('en-us')
+ except ImportError, e:
+ # If settings should be available, but aren't,
+ # raise the error and quit.
+ sys.stderr.write(smart_str(self.style.ERROR('Error: %s\n' % e)))
+ sys.exit(1)
+ try:
+ self.stdout = options.get('stdout', sys.stdout)
+ self.stderr = options.get('stderr', sys.stderr)
+ if self.requires_model_validation:
+ self.validate()
+ output = self.handle(*args, **options)
+ if output:
+ if self.output_transaction:
+ # This needs to be imported here, because it relies on
+ # settings.
+ from google.appengine._internal.django.db import connections, DEFAULT_DB_ALIAS
+ connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
+ if connection.ops.start_transaction_sql():
+ self.stdout.write(self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()) + '\n')
+ self.stdout.write(output)
+ if self.output_transaction:
+ self.stdout.write('\n' + self.style.SQL_KEYWORD("COMMIT;") + '\n')
+ except CommandError, e:
+ self.stderr.write(smart_str(self.style.ERROR('Error: %s\n' % e)))
+ sys.exit(1)
+
+ def validate(self, app=None, display_num_errors=False):
+ """
+ Validates the given app, raising CommandError for any errors.
+
+ If app is None, then this will validate all installed apps.
+
+ """
+ from google.appengine._internal.django.core.management.validation import get_validation_errors
+ try:
+ from cStringIO import StringIO
+ except ImportError:
+ from StringIO import StringIO
+ s = StringIO()
+ num_errors = get_validation_errors(s, app)
+ if num_errors:
+ s.seek(0)
+ error_text = s.read()
+ raise CommandError("One or more models did not validate:\n%s" % error_text)
+ if display_num_errors:
+ self.stdout.write("%s error%s found\n" % (num_errors, num_errors != 1 and 's' or ''))
+
+ def handle(self, *args, **options):
+ """
+ The actual logic of the command. Subclasses must implement
+ this method.
+
+ """
+ raise NotImplementedError()
+
+class AppCommand(BaseCommand):
+ """
+ A management command which takes one or more installed application
+ names as arguments, and does something with each of them.
+
+ Rather than implementing ``handle()``, subclasses must implement
+ ``handle_app()``, which will be called once for each application.
+
+ """
+ args = '<appname appname ...>'
+
+ def handle(self, *app_labels, **options):
+ from google.appengine._internal.django.db import models
+ if not app_labels:
+ raise CommandError('Enter at least one appname.')
+ try:
+ app_list = [models.get_app(app_label) for app_label in app_labels]
+ except (ImproperlyConfigured, ImportError), e:
+ raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
+ output = []
+ for app in app_list:
+ app_output = self.handle_app(app, **options)
+ if app_output:
+ output.append(app_output)
+ return '\n'.join(output)
+
+ def handle_app(self, app, **options):
+ """
+ Perform the command's actions for ``app``, which will be the
+ Python module corresponding to an application name given on
+ the command line.
+
+ """
+ raise NotImplementedError()
+
+class LabelCommand(BaseCommand):
+ """
+ A management command which takes one or more arbitrary arguments
+ (labels) on the command line, and does something with each of
+ them.
+
+ Rather than implementing ``handle()``, subclasses must implement
+ ``handle_label()``, which will be called once for each label.
+
+ If the arguments should be names of installed applications, use
+ ``AppCommand`` instead.
+
+ """
+ args = '<label label ...>'
+ label = 'label'
+
+ def handle(self, *labels, **options):
+ if not labels:
+ raise CommandError('Enter at least one %s.' % self.label)
+
+ output = []
+ for label in labels:
+ label_output = self.handle_label(label, **options)
+ if label_output:
+ output.append(label_output)
+ return '\n'.join(output)
+
+ def handle_label(self, label, **options):
+ """
+ Perform the command's actions for ``label``, which will be the
+ string as given on the command line.
+
+ """
+ raise NotImplementedError()
+
+class NoArgsCommand(BaseCommand):
+ """
+ A command which takes no arguments on the command line.
+
+ Rather than implementing ``handle()``, subclasses must implement
+ ``handle_noargs()``; ``handle()`` itself is overridden to ensure
+ no arguments are passed to the command.
+
+ Attempting to pass arguments will raise ``CommandError``.
+
+ """
+ args = ''
+
+ def handle(self, *args, **options):
+ if args:
+ raise CommandError("Command doesn't accept any arguments")
+ return self.handle_noargs(**options)
+
+ def handle_noargs(self, **options):
+ """
+ Perform this command's actions.
+
+ """
+ raise NotImplementedError()
+
+def copy_helper(style, app_or_project, name, directory, other_name=''):
+ """
+ Copies either a Django application layout template or a Django project
+ layout template into the specified directory.
+
+ """
+ # style -- A color style object (see django.core.management.color).
+ # app_or_project -- The string 'app' or 'project'.
+ # name -- The name of the application or project.
+ # directory -- The directory to which the layout template should be copied.
+ # other_name -- When copying an application layout, this should be the name
+ # of the project.
+ import re
+ import shutil
+ other = {'project': 'app', 'app': 'project'}[app_or_project]
+ if not re.search(r'^[_a-zA-Z]\w*$', name): # If it's not a valid directory name.
+ # Provide a smart error message, depending on the error.
+ if not re.search(r'^[_a-zA-Z]', name):
+ message = 'make sure the name begins with a letter or underscore'
+ else:
+ message = 'use only numbers, letters and underscores'
+ raise CommandError("%r is not a valid %s name. Please %s." % (name, app_or_project, message))
+ top_dir = os.path.join(directory, name)
+ try:
+ os.mkdir(top_dir)
+ except OSError, e:
+ raise CommandError(e)
+
+ # Determine where the app or project templates are. Use
+ # django.__path__[0] because we don't know into which directory
+ # django has been installed.
+ template_dir = os.path.join(django.__path__[0], 'conf', '%s_template' % app_or_project)
+
+ for d, subdirs, files in os.walk(template_dir):
+ relative_dir = d[len(template_dir)+1:].replace('%s_name' % app_or_project, name)
+ if relative_dir:
+ os.mkdir(os.path.join(top_dir, relative_dir))
+ for subdir in subdirs[:]:
+ if subdir.startswith('.'):
+ subdirs.remove(subdir)
+ for f in files:
+ if not f.endswith('.py'):
+ # Ignore .pyc, .pyo, .py.class etc, as they cause various
+ # breakages.
+ continue
+ path_old = os.path.join(d, f)
+ path_new = os.path.join(top_dir, relative_dir, f.replace('%s_name' % app_or_project, name))
+ fp_old = open(path_old, 'r')
+ fp_new = open(path_new, 'w')
+ fp_new.write(fp_old.read().replace('{{ %s_name }}' % app_or_project, name).replace('{{ %s_name }}' % other, other_name))
+ fp_old.close()
+ fp_new.close()
+ try:
+ shutil.copymode(path_old, path_new)
+ _make_writeable(path_new)
+ except OSError:
+ sys.stderr.write(style.NOTICE("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new))
+
+def _make_writeable(filename):
+ """
+ Make sure that the file is writeable. Useful if our source is
+ read-only.
+
+ """
+ import stat
+ if sys.platform.startswith('java'):
+ # On Jython there is no os.access()
+ return
+ if not os.access(filename, os.W_OK):
+ st = os.stat(filename)
+ new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
+ os.chmod(filename, new_permissions)
diff --git a/google/appengine/_internal/django/core/management/color.py b/google/appengine/_internal/django/core/management/color.py
new file mode 100644
index 0000000..59a9ecb
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/color.py
@@ -0,0 +1,50 @@
+"""
+Sets up the terminal color scheme.
+"""
+
+import os
+import sys
+
+from google.appengine._internal.django.utils import termcolors
+
+def supports_color():
+ """
+ Returns True if the running system's terminal supports color, and False
+ otherwise.
+ """
+ unsupported_platform = (sys.platform in ('win32', 'Pocket PC'))
+ # isatty is not always implemented, #6223.
+ is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
+ if unsupported_platform or not is_a_tty:
+ return False
+ return True
+
+def color_style():
+ """Returns a Style object with the Django color scheme."""
+ if not supports_color():
+ style = no_style()
+ else:
+ DJANGO_COLORS = os.environ.get('DJANGO_COLORS', '')
+ color_settings = termcolors.parse_color_setting(DJANGO_COLORS)
+ if color_settings:
+ class dummy: pass
+ style = dummy()
+ # The nocolor palette has all available roles.
+ # Use that palette as the basis for populating
+ # the palette as defined in the environment.
+ for role in termcolors.PALETTES[termcolors.NOCOLOR_PALETTE]:
+ format = color_settings.get(role,{})
+ setattr(style, role, termcolors.make_style(**format))
+ # For backwards compatibility,
+ # set style for ERROR_OUTPUT == ERROR
+ style.ERROR_OUTPUT = style.ERROR
+ else:
+ style = no_style()
+ return style
+
+def no_style():
+ """Returns a Style object that has no colors."""
+ class dummy:
+ def __getattr__(self, attr):
+ return lambda x: x
+ return dummy()
diff --git a/google/appengine/_internal/django/core/management/commands/__init__.py b/google/appengine/_internal/django/core/management/commands/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/__init__.py
diff --git a/google/appengine/_internal/django/core/management/commands/cleanup.py b/google/appengine/_internal/django/core/management/commands/cleanup.py
new file mode 100644
index 0000000..83f0216
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/cleanup.py
@@ -0,0 +1,11 @@
+import datetime
+from google.appengine._internal.django.core.management.base import NoArgsCommand
+
+class Command(NoArgsCommand):
+ help = "Can be run as a cronjob or directly to clean out old data from the database (only expired sessions at the moment)."
+
+ def handle_noargs(self, **options):
+ from google.appengine._internal.django.db import transaction
+ from google.appengine._internal.django.contrib.sessions.models import Session
+ Session.objects.filter(expire_date__lt=datetime.datetime.now()).delete()
+ transaction.commit_unless_managed()
diff --git a/google/appengine/_internal/django/core/management/commands/compilemessages.py b/google/appengine/_internal/django/core/management/commands/compilemessages.py
new file mode 100644
index 0000000..64785ed
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/compilemessages.py
@@ -0,0 +1,61 @@
+import codecs
+import os
+import sys
+from optparse import make_option
+from google.appengine._internal.django.core.management.base import BaseCommand, CommandError
+
+def has_bom(fn):
+ f = open(fn, 'r')
+ sample = f.read(4)
+ return sample[:3] == '\xef\xbb\xbf' or sample.startswith(codecs.BOM_UTF16_LE) or sample.startswith(codecs.BOM_UTF16_BE)
+
+def compile_messages(stderr, locale=None):
+ basedirs = [os.path.join('conf', 'locale'), 'locale']
+ if os.environ.get('DJANGO_SETTINGS_MODULE'):
+ from google.appengine._internal.django.conf import settings
+ basedirs.extend(settings.LOCALE_PATHS)
+
+ # Gather existing directories.
+ basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs)))
+
+ if not basedirs:
+ raise CommandError("This script should be run from the Django SVN tree or your project or app tree, or with the settings module specified.")
+
+ for basedir in basedirs:
+ if locale:
+ basedir = os.path.join(basedir, locale, 'LC_MESSAGES')
+ for dirpath, dirnames, filenames in os.walk(basedir):
+ for f in filenames:
+ if f.endswith('.po'):
+ stderr.write('processing file %s in %s\n' % (f, dirpath))
+ fn = os.path.join(dirpath, f)
+ if has_bom(fn):
+ raise CommandError("The %s file has a BOM (Byte Order Mark). Django only supports .po files encoded in UTF-8 and without any BOM." % fn)
+ pf = os.path.splitext(fn)[0]
+ # Store the names of the .mo and .po files in an environment
+ # variable, rather than doing a string replacement into the
+ # command, so that we can take advantage of shell quoting, to
+ # quote any malicious characters/escaping.
+ # See http://cyberelk.net/tim/articles/cmdline/ar01s02.html
+ os.environ['djangocompilemo'] = pf + '.mo'
+ os.environ['djangocompilepo'] = pf + '.po'
+ if sys.platform == 'win32': # Different shell-variable syntax
+ cmd = 'msgfmt --check-format -o "%djangocompilemo%" "%djangocompilepo%"'
+ else:
+ cmd = 'msgfmt --check-format -o "$djangocompilemo" "$djangocompilepo"'
+ os.system(cmd)
+
+
+class Command(BaseCommand):
+ option_list = BaseCommand.option_list + (
+ make_option('--locale', '-l', dest='locale',
+ help='The locale to process. Default is to process all.'),
+ )
+ help = 'Compiles .po files to .mo files for use with builtin gettext support.'
+
+ requires_model_validation = False
+ can_import_settings = False
+
+ def handle(self, **options):
+ locale = options.get('locale')
+ compile_messages(self.stderr, locale=locale)
diff --git a/google/appengine/_internal/django/core/management/commands/createcachetable.py b/google/appengine/_internal/django/core/management/commands/createcachetable.py
new file mode 100644
index 0000000..7cdae57
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/createcachetable.py
@@ -0,0 +1,52 @@
+from optparse import make_option
+
+from google.appengine._internal.django.core.management.base import LabelCommand
+from google.appengine._internal.django.db import connections, transaction, models, DEFAULT_DB_ALIAS
+
+class Command(LabelCommand):
+ help = "Creates the table needed to use the SQL cache backend."
+ args = "<tablename>"
+ label = 'tablename'
+
+ option_list = LabelCommand.option_list + (
+ make_option('--database', action='store', dest='database',
+ default=DEFAULT_DB_ALIAS, help='Nominates a database onto '
+ 'which the cache table will be installed. '
+ 'Defaults to the "default" database.'),
+ )
+
+ requires_model_validation = False
+
+ def handle_label(self, tablename, **options):
+ alias = options.get('database', DEFAULT_DB_ALIAS)
+ connection = connections[alias]
+ fields = (
+ # "key" is a reserved word in MySQL, so use "cache_key" instead.
+ models.CharField(name='cache_key', max_length=255, unique=True, primary_key=True),
+ models.TextField(name='value'),
+ models.DateTimeField(name='expires', db_index=True),
+ )
+ table_output = []
+ index_output = []
+ qn = connection.ops.quote_name
+ for f in fields:
+ field_output = [qn(f.name), f.db_type(connection=connection)]
+ field_output.append("%sNULL" % (not f.null and "NOT " or ""))
+ if f.primary_key:
+ field_output.append("PRIMARY KEY")
+ elif f.unique:
+ field_output.append("UNIQUE")
+ if f.db_index:
+ unique = f.unique and "UNIQUE " or ""
+ index_output.append("CREATE %sINDEX %s ON %s (%s);" % (unique, qn('%s_%s' % (tablename, f.name)), qn(tablename),
+ qn(f.name)))
+ table_output.append(" ".join(field_output))
+ full_statement = ["CREATE TABLE %s (" % qn(tablename)]
+ for i, line in enumerate(table_output):
+ full_statement.append(' %s%s' % (line, i < len(table_output)-1 and ',' or ''))
+ full_statement.append(');')
+ curs = connection.cursor()
+ curs.execute("\n".join(full_statement))
+ for statement in index_output:
+ curs.execute(statement)
+ transaction.commit_unless_managed(using=alias)
diff --git a/google/appengine/_internal/django/core/management/commands/dbshell.py b/google/appengine/_internal/django/core/management/commands/dbshell.py
new file mode 100644
index 0000000..9092470
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/dbshell.py
@@ -0,0 +1,27 @@
+from optparse import make_option
+
+from google.appengine._internal.django.core.management.base import BaseCommand, CommandError
+from google.appengine._internal.django.db import connections, DEFAULT_DB_ALIAS
+
+class Command(BaseCommand):
+ help = ("Runs the command-line client for specified database, or the "
+ "default database if none is provided.")
+
+ option_list = BaseCommand.option_list + (
+ make_option('--database', action='store', dest='database',
+ default=DEFAULT_DB_ALIAS, help='Nominates a database onto which to '
+ 'open a shell. Defaults to the "default" database.'),
+ )
+
+ requires_model_validation = False
+
+ def handle(self, **options):
+ connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
+ try:
+ connection.client.runshell()
+ except OSError:
+ # Note that we're assuming OSError means that the client program
+ # isn't installed. There's a possibility OSError would be raised
+ # for some other reason, in which case this error message would be
+ # inaccurate. Still, this message catches the common case.
+ raise CommandError('You appear not to have the %r program installed or on your path.' % connection.client.executable_name)
diff --git a/google/appengine/_internal/django/core/management/commands/diffsettings.py b/google/appengine/_internal/django/core/management/commands/diffsettings.py
new file mode 100644
index 0000000..abcdd3e
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/diffsettings.py
@@ -0,0 +1,32 @@
+from google.appengine._internal.django.core.management.base import NoArgsCommand
+
+def module_to_dict(module, omittable=lambda k: k.startswith('_')):
+ "Converts a module namespace to a Python dictionary. Used by get_settings_diff."
+ return dict([(k, repr(v)) for k, v in module.__dict__.items() if not omittable(k)])
+
+class Command(NoArgsCommand):
+ help = """Displays differences between the current settings.py and Django's
+ default settings. Settings that don't appear in the defaults are
+ followed by "###"."""
+
+ requires_model_validation = False
+
+ def handle_noargs(self, **options):
+ # Inspired by Postfix's "postconf -n".
+ from google.appengine._internal.django.conf import settings, global_settings
+
+ # Because settings are imported lazily, we need to explicitly load them.
+ settings._setup()
+
+ user_settings = module_to_dict(settings._wrapped)
+ default_settings = module_to_dict(global_settings)
+
+ output = []
+ keys = user_settings.keys()
+ keys.sort()
+ for key in keys:
+ if key not in default_settings:
+ output.append("%s = %s ###" % (key, user_settings[key]))
+ elif user_settings[key] != default_settings[key]:
+ output.append("%s = %s" % (key, user_settings[key]))
+ return '\n'.join(output)
diff --git a/google/appengine/_internal/django/core/management/commands/dumpdata.py b/google/appengine/_internal/django/core/management/commands/dumpdata.py
new file mode 100644
index 0000000..03cd0c8
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/dumpdata.py
@@ -0,0 +1,167 @@
+from google.appengine._internal.django.core.exceptions import ImproperlyConfigured
+from google.appengine._internal.django.core.management.base import BaseCommand, CommandError
+from google.appengine._internal.django.core import serializers
+from google.appengine._internal.django.db import connections, router, DEFAULT_DB_ALIAS
+from google.appengine._internal.django.utils.datastructures import SortedDict
+
+from optparse import make_option
+
+class Command(BaseCommand):
+    option_list = BaseCommand.option_list + (
+        make_option('--format', default='json', dest='format',
+            help='Specifies the output serialization format for fixtures.'),
+        make_option('--indent', default=None, dest='indent', type='int',
+            help='Specifies the indent level to use when pretty-printing output'),
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a specific database to load '
+                'fixtures into. Defaults to the "default" database.'),
+        make_option('-e', '--exclude', dest='exclude',action='append', default=[],
+            help='App to exclude (use multiple --exclude to exclude multiple apps).'),
+        make_option('-n', '--natural', action='store_true', dest='use_natural_keys', default=False,
+            help='Use natural keys if they are available.'),
+    )
+    help = ("Output the contents of the database as a fixture of the given "
+            "format (using each model's default manager).")
+    args = '[appname appname.ModelName ...]'
+
+    def handle(self, *app_labels, **options):
+        """Serialize the selected models and return the fixture text.
+
+        ``app_labels`` may name whole apps ("auth") or individual models
+        ("auth.User"); with no labels, every installed app (minus any
+        --exclude'd ones) is dumped. Raises CommandError for unknown
+        apps, models, or serialization formats.
+        """
+        from google.appengine._internal.django.db.models import get_app, get_apps, get_models, get_model
+
+        format = options.get('format','json')
+        indent = options.get('indent',None)
+        using = options.get('database', DEFAULT_DB_ALIAS)
+        connection = connections[using]
+        exclude = options.get('exclude',[])
+        show_traceback = options.get('traceback', False)
+        use_natural_keys = options.get('use_natural_keys', False)
+
+        excluded_apps = set(get_app(app_label) for app_label in exclude)
+
+        # app_list maps app -> list of models, where a value of None
+        # means "all models of that app".
+        if len(app_labels) == 0:
+            app_list = SortedDict((app, None) for app in get_apps() if app not in excluded_apps)
+        else:
+            app_list = SortedDict()
+            for label in app_labels:
+                try:
+                    # 'app.Model' form; the bare-app form lands in the
+                    # ValueError handler below.
+                    app_label, model_label = label.split('.')
+                    try:
+                        app = get_app(app_label)
+                    except ImproperlyConfigured:
+                        raise CommandError("Unknown application: %s" % app_label)
+
+                    model = get_model(app_label, model_label)
+                    if model is None:
+                        raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
+
+                    if app in app_list.keys():
+                        if app_list[app] and model not in app_list[app]:
+                            app_list[app].append(model)
+                    else:
+                        app_list[app] = [model]
+                except ValueError:
+                    # This is just an app - no model qualifier
+                    app_label = label
+                    try:
+                        app = get_app(app_label)
+                    except ImproperlyConfigured:
+                        raise CommandError("Unknown application: %s" % app_label)
+                    app_list[app] = None
+
+        # Check that the serialization format exists; this is a shortcut to
+        # avoid collating all the objects and _then_ failing.
+        if format not in serializers.get_public_serializer_formats():
+            raise CommandError("Unknown serialization format: %s" % format)
+
+        # NOTE(review): redundant with the membership test above -- a format
+        # listed by get_public_serializer_formats() should always resolve.
+        try:
+            serializers.get_serializer(format)
+        except KeyError:
+            raise CommandError("Unknown serialization format: %s" % format)
+
+        # Now collate the objects to be serialized.
+        objects = []
+        for model in sort_dependencies(app_list.items()):
+            # Skip proxy models and models the router keeps off this database.
+            if not model._meta.proxy and router.allow_syncdb(using, model):
+                objects.extend(model._default_manager.using(using).all())
+
+        try:
+            return serializers.serialize(format, objects, indent=indent,
+                    use_natural_keys=use_natural_keys)
+        except Exception, e:
+            if show_traceback:
+                raise
+            raise CommandError("Unable to serialize database: %s" % e)
+
+def sort_dependencies(app_list):
+    """Sort a list of (app, modellist) pairs into a single list of models.
+
+    The single list of models is sorted so that any model with a natural key
+    is serialized before a normal model, and any model with a natural key
+    dependency has its dependencies serialized first.
+
+    Raises CommandError if the dependencies are circular.
+    """
+    from google.appengine._internal.django.db.models import get_model, get_models
+    # Process the list of models, and get the list of dependencies
+    model_dependencies = []
+    models = set()
+    for app, model_list in app_list:
+        # A model_list of None means "every model in the app".
+        if model_list is None:
+            model_list = get_models(app)
+
+        for model in model_list:
+            models.add(model)
+            # Add any explicitly defined dependencies
+            if hasattr(model, 'natural_key'):
+                deps = getattr(model.natural_key, 'dependencies', [])
+                if deps:
+                    # Dependencies are declared as 'app_label.ModelName' strings.
+                    deps = [get_model(*d.split('.')) for d in deps]
+            else:
+                deps = []
+
+            # Now add a dependency for any FK or M2M relation with
+            # a model that defines a natural key
+            for field in model._meta.fields:
+                if hasattr(field.rel, 'to'):
+                    rel_model = field.rel.to
+                    if hasattr(rel_model, 'natural_key'):
+                        deps.append(rel_model)
+            for field in model._meta.many_to_many:
+                rel_model = field.rel.to
+                if hasattr(rel_model, 'natural_key'):
+                    deps.append(rel_model)
+            model_dependencies.append((model, deps))
+
+    model_dependencies.reverse()
+    # Now sort the models to ensure that dependencies are met. This
+    # is done by repeatedly iterating over the input list of models.
+    # If all the dependencies of a given model are in the final list,
+    # that model is promoted to the end of the final list. This process
+    # continues until the input list is empty, or we do a full iteration
+    # over the input models without promoting a model to the final list.
+    # If we do a full iteration without a promotion, that means there are
+    # circular dependencies in the list.
+    model_list = []
+    while model_dependencies:
+        skipped = []
+        changed = False
+        while model_dependencies:
+            model, deps = model_dependencies.pop()
+
+            # If all of the models in the dependency list are either already
+            # on the final model list, or not on the original serialization list,
+            # then we've found another model with all its dependencies satisfied.
+            found = True
+            for candidate in ((d not in models or d in model_list) for d in deps):
+                if not candidate:
+                    found = False
+            if found:
+                model_list.append(model)
+                changed = True
+            else:
+                skipped.append((model, deps))
+        if not changed:
+            raise CommandError("Can't resolve dependencies for %s in serialized app list." %
+                ', '.join('%s.%s' % (model._meta.app_label, model._meta.object_name)
+                for model, deps in sorted(skipped, key=lambda obj: obj[0].__name__))
+            )
+        model_dependencies = skipped
+
+    return model_list
diff --git a/google/appengine/_internal/django/core/management/commands/flush.py b/google/appengine/_internal/django/core/management/commands/flush.py
new file mode 100644
index 0000000..00f7c53
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/flush.py
@@ -0,0 +1,83 @@
+from optparse import make_option
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS
+from google.appengine._internal.django.core.management import call_command
+from google.appengine._internal.django.core.management.base import NoArgsCommand, CommandError
+from google.appengine._internal.django.core.management.color import no_style
+from google.appengine._internal.django.core.management.sql import sql_flush, emit_post_sync_signal
+from google.appengine._internal.django.utils.importlib import import_module
+
+
+
+class Command(NoArgsCommand):
+    option_list = NoArgsCommand.option_list + (
+        make_option('--noinput', action='store_false', dest='interactive', default=True,
+            help='Tells Django to NOT prompt the user for input of any kind.'),
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a database to flush. '
+                'Defaults to the "default" database.'),
+    )
+    help = "Executes ``sqlflush`` on the current database."
+
+    def handle_noargs(self, **options):
+        """Truncate all Django-managed tables on the selected database.
+
+        Prompts for confirmation unless --noinput was given; on success
+        re-emits the post-syncdb signal and reinstalls the initial_data
+        fixture. Raises CommandError if the flush SQL fails.
+        """
+        db = options.get('database', DEFAULT_DB_ALIAS)
+        connection = connections[db]
+        verbosity = int(options.get('verbosity', 1))
+        interactive = options.get('interactive')
+
+        self.style = no_style()
+
+        # Import the 'management' module within each installed app, to register
+        # dispatcher events.
+        for app_name in settings.INSTALLED_APPS:
+            try:
+                import_module('.management', app_name)
+            except ImportError:
+                # Apps without a management module are fine.
+                pass
+
+        # only_django=True: flush only tables Django knows about.
+        sql_list = sql_flush(self.style, connection, only_django=True)
+
+        if interactive:
+            confirm = raw_input("""You have requested a flush of the database.
+This will IRREVERSIBLY DESTROY all data currently in the %r database,
+and return each table to the state it was in after syncdb.
+Are you sure you want to do this?
+
+    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
+        else:
+            confirm = 'yes'
+
+        if confirm == 'yes':
+            try:
+                cursor = connection.cursor()
+                for sql in sql_list:
+                    cursor.execute(sql)
+            except Exception, e:
+                transaction.rollback_unless_managed(using=db)
+                raise CommandError("""Database %s couldn't be flushed. Possible reasons:
+  * The database isn't running or isn't configured correctly.
+  * At least one of the expected database tables doesn't exist.
+  * The SQL was invalid.
+Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.
+The full error: %s""" % (connection.settings_dict['NAME'], e))
+            transaction.commit_unless_managed(using=db)
+
+            # Emit the post sync signal. This allows individual
+            # applications to respond as if the database had been
+            # sync'd from scratch.
+            all_models = []
+            for app in models.get_apps():
+                all_models.extend([
+                    m for m in models.get_models(app, include_auto_created=True)
+                    if router.allow_syncdb(db, m)
+                ])
+            emit_post_sync_signal(all_models, verbosity, interactive, db)
+
+            # Reinstall the initial_data fixture.
+            kwargs = options.copy()
+            kwargs['database'] = db
+            call_command('loaddata', 'initial_data', **kwargs)
+
+        else:
+            print "Flush cancelled."
diff --git a/google/appengine/_internal/django/core/management/commands/inspectdb.py b/google/appengine/_internal/django/core/management/commands/inspectdb.py
new file mode 100644
index 0000000..3b01695
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/inspectdb.py
@@ -0,0 +1,167 @@
+import keyword
+from optparse import make_option
+
+from google.appengine._internal.django.core.management.base import NoArgsCommand, CommandError
+from google.appengine._internal.django.db import connections, DEFAULT_DB_ALIAS
+
+class Command(NoArgsCommand):
+    help = "Introspects the database tables in the given database and outputs a Django model module."
+
+    option_list = NoArgsCommand.option_list + (
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a database to '
+            'introspect. Defaults to using the "default" database.'),
+    )
+
+    # Introspection generates models; there are none to validate yet.
+    requires_model_validation = False
+
+    # Module path emitted in the generated "from ... import models" line.
+    db_module = 'django.db'
+
+    def handle_noargs(self, **options):
+        """Write the generated model module to stdout, one line at a time."""
+        try:
+            for line in self.handle_inspection(options):
+                self.stdout.write("%s\n" % line)
+        except NotImplementedError:
+            raise CommandError("Database inspection isn't supported for the currently selected database backend.")
+
+    def handle_inspection(self, options):
+        """Yield the source lines of a models.py module describing the
+        tables found in the selected database."""
+        connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
+
+        # e.g. 'some table-name' -> 'SomeTableName'
+        table2model = lambda table_name: table_name.title().replace('_', '').replace(' ', '').replace('-', '')
+
+        cursor = connection.cursor()
+        yield "# This is an auto-generated Django model module."
+        yield "# You'll have to do the following manually to clean this up:"
+        yield "#     * Rearrange models' order"
+        yield "#     * Make sure each model has one field with primary_key=True"
+        yield "# Feel free to rename the models, but don't rename db_table values or field names."
+        yield "#"
+        yield "# Also note: You'll have to insert the output of 'django-admin.py sqlcustom [appname]'"
+        yield "# into your database."
+        yield ''
+        yield 'from %s import models' % self.db_module
+        yield ''
+        for table_name in connection.introspection.get_table_list(cursor):
+            yield 'class %s(models.Model):' % table2model(table_name)
+            # Backends that can't report relations/indexes just get empty maps.
+            try:
+                relations = connection.introspection.get_relations(cursor, table_name)
+            except NotImplementedError:
+                relations = {}
+            try:
+                indexes = connection.introspection.get_indexes(cursor, table_name)
+            except NotImplementedError:
+                indexes = {}
+            # row is a DB-API cursor.description-style tuple; indexes used
+            # below: row[0]=name, row[1]=type_code, row[3]=size,
+            # row[4]/row[5]=precision/scale, row[6]=null_ok.
+            for i, row in enumerate(connection.introspection.get_table_description(cursor, table_name)):
+                column_name = row[0]
+                att_name = column_name.lower()
+                comment_notes = [] # Holds Field notes, to be displayed in a Python comment.
+                extra_params = {} # Holds Field parameters such as 'db_column'.
+
+                # If the column name can't be used verbatim as a Python
+                # attribute, set the "db_column" for this Field.
+                if ' ' in att_name or '-' in att_name or keyword.iskeyword(att_name) or column_name != att_name:
+                    extra_params['db_column'] = column_name
+
+                # Modify the field name to make it Python-compatible.
+                if ' ' in att_name:
+                    att_name = att_name.replace(' ', '_')
+                    comment_notes.append('Field renamed to remove spaces.')
+
+                if '-' in att_name:
+                    att_name = att_name.replace('-', '_')
+                    comment_notes.append('Field renamed to remove dashes.')
+
+                if column_name != att_name:
+                    comment_notes.append('Field name made lowercase.')
+
+                if i in relations:
+                    # Self-referential FKs use the 'self' shorthand.
+                    rel_to = relations[i][1] == table_name and "'self'" or table2model(relations[i][1])
+                    field_type = 'ForeignKey(%s' % rel_to
+                    if att_name.endswith('_id'):
+                        att_name = att_name[:-3]
+                    else:
+                        extra_params['db_column'] = column_name
+                else:
+                    # Calling `get_field_type` to get the field type string and any
+                    # additional parameters and notes.
+                    field_type, field_params, field_notes = self.get_field_type(connection, table_name, row)
+                    extra_params.update(field_params)
+                    comment_notes.extend(field_notes)
+
+                # Add primary_key and unique, if necessary.
+                if column_name in indexes:
+                    if indexes[column_name]['primary_key']:
+                        extra_params['primary_key'] = True
+                    elif indexes[column_name]['unique']:
+                        extra_params['unique'] = True
+
+                field_type += '('
+
+                if keyword.iskeyword(att_name):
+                    att_name += '_field'
+                    comment_notes.append('Field renamed because it was a Python reserved word.')
+
+                # Don't output 'id = meta.AutoField(primary_key=True)', because
+                # that's assumed if it doesn't exist.
+                if att_name == 'id' and field_type == 'AutoField(' and extra_params == {'primary_key': True}:
+                    continue
+
+                # Add 'null' and 'blank', if the 'null_ok' flag was present in the
+                # table description.
+                if row[6]: # If it's NULL...
+                    extra_params['blank'] = True
+                    if not field_type in ('TextField(', 'CharField('):
+                        extra_params['null'] = True
+
+                field_desc = '%s = models.%s' % (att_name, field_type)
+                if extra_params:
+                    if not field_desc.endswith('('):
+                        field_desc += ', '
+                    field_desc += ', '.join(['%s=%r' % (k, v) for k, v in extra_params.items()])
+                field_desc += ')'
+                if comment_notes:
+                    field_desc += ' # ' + ' '.join(comment_notes)
+                yield '    %s' % field_desc
+            for meta_line in self.get_meta(table_name):
+                yield meta_line
+
+    def get_field_type(self, connection, table_name, row):
+        """
+        Given the database connection, the table name, and the cursor row
+        description, this routine will return the given field type name, as
+        well as any additional keyword parameters and notes for the field.
+        """
+        field_params = {}
+        field_notes = []
+
+        try:
+            field_type = connection.introspection.get_field_type(row[1], row)
+        except KeyError:
+            # Unknown type codes fall back to TextField with a note.
+            field_type = 'TextField'
+            field_notes.append('This field type is a guess.')
+
+        # This is a hook for DATA_TYPES_REVERSE to return a tuple of
+        # (field_type, field_params_dict).
+        if type(field_type) is tuple:
+            field_type, new_params = field_type
+            field_params.update(new_params)
+
+        # Add max_length for all CharFields.
+        if field_type == 'CharField' and row[3]:
+            field_params['max_length'] = row[3]
+
+        if field_type == 'DecimalField':
+            field_params['max_digits'] = row[4]
+            field_params['decimal_places'] = row[5]
+
+        return field_type, field_params, field_notes
+
+    def get_meta(self, table_name):
+        """
+        Return a sequence comprising the lines of code necessary
+        to construct the inner Meta class for the model corresponding
+        to the given database table name.
+        """
+        return ['    class Meta:',
+                '        db_table = %r' % table_name,
+                '']
diff --git a/google/appengine/_internal/django/core/management/commands/loaddata.py b/google/appengine/_internal/django/core/management/commands/loaddata.py
new file mode 100644
index 0000000..c150240
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/loaddata.py
@@ -0,0 +1,240 @@
+import sys
+import os
+import gzip
+import zipfile
+from optparse import make_option
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core import serializers
+from google.appengine._internal.django.core.management.base import BaseCommand
+from google.appengine._internal.django.core.management.color import no_style
+from google.appengine._internal.django.db import connections, router, transaction, DEFAULT_DB_ALIAS
+from google.appengine._internal.django.db.models import get_apps
+from google.appengine._internal.django.utils.itercompat import product
+
+try:
+ import bz2
+ has_bz2 = True
+except ImportError:
+ has_bz2 = False
+
+class Command(BaseCommand):
+    help = 'Installs the named fixture(s) in the database.'
+    args = "fixture [fixture ...]"
+
+    option_list = BaseCommand.option_list + (
+        make_option('--database', action='store', dest='database',
+            default=DEFAULT_DB_ALIAS, help='Nominates a specific database to load '
+            'fixtures into. Defaults to the "default" database.'),
+    )
+
+    def handle(self, *fixture_labels, **options):
+        """Load each named fixture into the selected database.
+
+        Fixture labels may carry a serialization-format and/or compression
+        suffix ("data.json.gz"); otherwise every known format/compression
+        combination is tried in each candidate fixture directory. All
+        fixtures are installed inside one transaction; any error rolls the
+        whole load back.
+        """
+        using = options.get('database', DEFAULT_DB_ALIAS)
+
+        connection = connections[using]
+        self.style = no_style()
+
+        verbosity = int(options.get('verbosity', 1))
+        show_traceback = options.get('traceback', False)
+
+        # commit is a stealth option - it isn't really useful as
+        # a command line option, but it can be useful when invoking
+        # loaddata from within another script.
+        # If commit=True, loaddata will use its own transaction;
+        # if commit=False, the data load SQL will become part of
+        # the transaction in place when loaddata was invoked.
+        commit = options.get('commit', True)
+
+        # Keep a count of the installed objects and fixtures
+        fixture_count = 0
+        loaded_object_count = 0
+        fixture_object_count = 0
+        models = set()
+
+        humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'
+
+        # Get a cursor (even though we don't need one yet). This has
+        # the side effect of initializing the test database (if
+        # it isn't already initialized).
+        cursor = connection.cursor()
+
+        # Start transaction management. All fixtures are installed in a
+        # single transaction to ensure that all references are resolved.
+        if commit:
+            transaction.commit_unless_managed(using=using)
+            transaction.enter_transaction_management(using=using)
+            transaction.managed(True, using=using)
+
+        # Adapter so a single-file zip archive reads like a plain fixture file.
+        class SingleZipReader(zipfile.ZipFile):
+            def __init__(self, *args, **kwargs):
+                zipfile.ZipFile.__init__(self, *args, **kwargs)
+                if settings.DEBUG:
+                    assert len(self.namelist()) == 1, "Zip-compressed fixtures must contain only one file."
+            def read(self):
+                return zipfile.ZipFile.read(self, self.namelist()[0])
+
+        # Maps compression suffix -> file-like opener; None means uncompressed.
+        compression_types = {
+            None: file,
+            'gz': gzip.GzipFile,
+            'zip': SingleZipReader
+        }
+        if has_bz2:
+            compression_types['bz2'] = bz2.BZ2File
+
+        app_module_paths = []
+        for app in get_apps():
+            if hasattr(app, '__path__'):
+                # It's a 'models/' subpackage
+                for path in app.__path__:
+                    app_module_paths.append(path)
+            else:
+                # It's a models.py module
+                app_module_paths.append(app.__file__)
+
+        app_fixtures = [os.path.join(os.path.dirname(path), 'fixtures') for path in app_module_paths]
+        for fixture_label in fixture_labels:
+            parts = fixture_label.split('.')
+
+            # Peel off an explicit compression suffix, if any.
+            if len(parts) > 1 and parts[-1] in compression_types:
+                compression_formats = [parts[-1]]
+                parts = parts[:-1]
+            else:
+                compression_formats = compression_types.keys()
+
+            # Peel off an explicit serialization format, if any.
+            if len(parts) == 1:
+                fixture_name = parts[0]
+                formats = serializers.get_public_serializer_formats()
+            else:
+                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
+                if format in serializers.get_public_serializer_formats():
+                    formats = [format]
+                else:
+                    formats = []
+
+            if formats:
+                if verbosity > 1:
+                    self.stdout.write("Loading '%s' fixtures...\n" % fixture_name)
+            else:
+                self.stderr.write(
+                    self.style.ERROR("Problem installing fixture '%s': %s is not a known serialization format.\n" %
+                        (fixture_name, format)))
+                if commit:
+                    transaction.rollback(using=using)
+                    transaction.leave_transaction_management(using=using)
+                return
+
+            if os.path.isabs(fixture_name):
+                fixture_dirs = [fixture_name]
+            else:
+                # App fixtures dirs, then FIXTURE_DIRS, then the cwd ('').
+                fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']
+
+            for fixture_dir in fixture_dirs:
+                if verbosity > 1:
+                    self.stdout.write("Checking %s for fixtures...\n" % humanize(fixture_dir))
+
+                label_found = False
+                for combo in product([using, None], formats, compression_formats):
+                    database, format, compression_format = combo
+                    # e.g. 'name.default.json.gz'; empty pieces are dropped.
+                    file_name = '.'.join(
+                        p for p in [
+                            fixture_name, database, format, compression_format
+                        ]
+                        if p
+                    )
+
+                    if verbosity > 1:
+                        self.stdout.write("Trying %s for %s fixture '%s'...\n" % (humanize(fixture_dir), file_name, fixture_name))
+                    full_path = os.path.join(fixture_dir, file_name)
+                    open_method = compression_types[compression_format]
+                    try:
+                        fixture = open_method(full_path, 'r')
+                        if label_found:
+                            fixture.close()
+                            self.stderr.write(self.style.ERROR("Multiple fixtures named '%s' in %s. Aborting.\n" %
+                                (fixture_name, humanize(fixture_dir))))
+                            if commit:
+                                transaction.rollback(using=using)
+                                transaction.leave_transaction_management(using=using)
+                            return
+                        else:
+                            fixture_count += 1
+                            objects_in_fixture = 0
+                            loaded_objects_in_fixture = 0
+                            if verbosity > 0:
+                                self.stdout.write("Installing %s fixture '%s' from %s.\n" % (format, fixture_name, humanize(fixture_dir)))
+                            try:
+                                objects = serializers.deserialize(format, fixture, using=using)
+                                for obj in objects:
+                                    objects_in_fixture += 1
+                                    # The router may exclude some models from
+                                    # this database; count but don't save them.
+                                    if router.allow_syncdb(using, obj.object.__class__):
+                                        loaded_objects_in_fixture += 1
+                                        models.add(obj.object.__class__)
+                                        obj.save(using=using)
+                                loaded_object_count += loaded_objects_in_fixture
+                                fixture_object_count += objects_in_fixture
+                                label_found = True
+                            except (SystemExit, KeyboardInterrupt):
+                                raise
+                            except Exception:
+                                import traceback
+                                fixture.close()
+                                if commit:
+                                    transaction.rollback(using=using)
+                                    transaction.leave_transaction_management(using=using)
+                                if show_traceback:
+                                    traceback.print_exc()
+                                else:
+                                    self.stderr.write(
+                                        self.style.ERROR("Problem installing fixture '%s': %s\n" %
+                                             (full_path, ''.join(traceback.format_exception(sys.exc_type,
+                                                 sys.exc_value, sys.exc_traceback)))))
+                                return
+                            fixture.close()
+
+                            # If the fixture we loaded contains 0 objects, assume that an
+                            # error was encountered during fixture loading.
+                            if objects_in_fixture == 0:
+                                self.stderr.write(
+                                    self.style.ERROR("No fixture data found for '%s'. (File format may be invalid.)\n" %
+                                        (fixture_name)))
+                                if commit:
+                                    transaction.rollback(using=using)
+                                    transaction.leave_transaction_management(using=using)
+                                return
+
+                    # Presumably an IOError from open(): this name/format
+                    # combination doesn't exist here. NOTE(review): the broad
+                    # except may also mask other errors -- verify.
+                    except Exception, e:
+                        if verbosity > 1:
+                            self.stdout.write("No %s fixture '%s' in %s.\n" % (format, fixture_name, humanize(fixture_dir)))
+
+        # If we found even one object in a fixture, we need to reset the
+        # database sequences.
+        if loaded_object_count > 0:
+            sequence_sql = connection.ops.sequence_reset_sql(self.style, models)
+            if sequence_sql:
+                if verbosity > 1:
+                    self.stdout.write("Resetting sequences\n")
+                for line in sequence_sql:
+                    cursor.execute(line)
+
+        if commit:
+            transaction.commit(using=using)
+            transaction.leave_transaction_management(using=using)
+
+        if fixture_object_count == 0:
+            if verbosity > 0:
+                self.stdout.write("No fixtures found.\n")
+        else:
+            if verbosity > 0:
+                if fixture_object_count == loaded_object_count:
+                    self.stdout.write("Installed %d object(s) from %d fixture(s)\n" % (
+                        loaded_object_count, fixture_count))
+                else:
+                    self.stdout.write("Installed %d object(s) (of %d) from %d fixture(s)\n" % (
+                        loaded_object_count, fixture_object_count, fixture_count))
+
+        # Close the DB connection. This is required as a workaround for an
+        # edge case in MySQL: if the same connection is used to
+        # create tables, load data, and query, the query can return
+        # incorrect results. See Django #7572, MySQL #37735.
+        if commit:
+            connection.close()
diff --git a/google/appengine/_internal/django/core/management/commands/makemessages.py b/google/appengine/_internal/django/core/management/commands/makemessages.py
new file mode 100644
index 0000000..5f028ad
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/makemessages.py
@@ -0,0 +1,321 @@
+import fnmatch
+import glob
+import os
+import re
+import sys
+from itertools import dropwhile
+from optparse import make_option
+from subprocess import PIPE, Popen
+
+from google.appengine._internal.django.core.management.base import CommandError, BaseCommand
+from google.appengine._internal.django.utils.text import get_text_list
+
+pythonize_re = re.compile(r'(?:^|\n)\s*//')
+plural_forms_re = re.compile(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL)
+
+def handle_extensions(extensions=('html',)):
+    """
+    Organizes multiple extensions that are separated with commas or passed by
+    using --extension/-e multiple times.
+
+    For example: running 'django-admin makemessages -e js,txt -e xhtml -a'
+    would result in an extension list: ['.js', '.txt', '.xhtml']
+
+    >>> handle_extensions(['.html', 'html,js,py,py,py,.py', 'py,.py'])
+    ['.html', '.js']
+    >>> handle_extensions(['.html, txt,.tpl'])
+    ['.html', '.tpl', '.txt']
+    """
+    ext_list = []
+    for ext in extensions:
+        ext_list.extend(ext.replace(' ','').split(','))
+    for i, ext in enumerate(ext_list):
+        if not ext.startswith('.'):
+            ext_list[i] = '.%s' % ext_list[i]
+
+    # we don't want *.py files here because of the way non-*.py files
+    # are handled in make_messages() (they are copied to file.ext.py files to
+    # trick xgettext to parse them as Python files)
+    # NOTE(review): this returns a set, so the ordering shown in the
+    # doctest-style examples above is not actually guaranteed.
+    return set([x for x in ext_list if x != '.py'])
+
+def _popen(cmd):
+    """
+    Friendly wrapper around Popen for Windows.
+
+    Runs ``cmd`` through the shell and returns the (stdout, stderr) pair
+    from communicate(). close_fds is disabled on Windows ('nt') --
+    presumably because Python 2's subprocess cannot combine close_fds
+    with redirected std handles there; verify before changing.
+    """
+    p = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE, close_fds=os.name != 'nt', universal_newlines=True)
+    return p.communicate()
+
+def walk(root, topdown=True, onerror=None, followlinks=False):
+    """
+    A version of os.walk that can follow symlinks for Python < 2.6.
+
+    Yields (dirpath, dirnames, filenames) tuples exactly like os.walk,
+    additionally recursing into symlinked directories when followlinks
+    is true.
+    """
+    for dirpath, dirnames, filenames in os.walk(root, topdown, onerror):
+        yield (dirpath, dirnames, filenames)
+        if followlinks:
+            for d in dirnames:
+                p = os.path.join(dirpath, d)
+                if os.path.islink(p):
+                    # NOTE(review): a symlink cycle would make this
+                    # recursion loop forever -- confirm callers' trees.
+                    for link_dirpath, link_dirnames, link_filenames in walk(p):
+                        yield (link_dirpath, link_dirnames, link_filenames)
+
+def is_ignored(path, ignore_patterns):
+    """
+    Helper function to check if the given path should be ignored or not.
+
+    Matching is a case-sensitive fnmatch against each pattern in turn.
+    """
+    for pattern in ignore_patterns:
+        if fnmatch.fnmatchcase(path, pattern):
+            return True
+    return False
+
+def find_files(root, ignore_patterns, verbosity, symlinks=False):
+    """
+    Helper function to get all files in the given root.
+
+    Returns a sorted list of (dirpath, filename) pairs, skipping files
+    whose normalized path matches ignore_patterns.
+    """
+    all_files = []
+    # NOTE(review): walks "." rather than ``root`` -- the root argument
+    # is effectively ignored (callers pass "." anyway).
+    for (dirpath, dirnames, filenames) in walk(".", followlinks=symlinks):
+        for f in filenames:
+            norm_filepath = os.path.normpath(os.path.join(dirpath, f))
+            if is_ignored(norm_filepath, ignore_patterns):
+                if verbosity > 1:
+                    sys.stdout.write('ignoring file %s in %s\n' % (f, dirpath))
+            else:
+                all_files.extend([(dirpath, f)])
+    all_files.sort()
+    return all_files
+
+def copy_plural_forms(msgs, locale, domain, verbosity):
+    """
+    Copies plural forms header contents from a Django catalog of locale to
+    the msgs string, inserting it at the right place. msgs should be the
+    contents of a newly created .po file.
+
+    For the 'djangojs' domain, the 'django' catalog is used as a fallback.
+    Returns msgs unchanged if no matching Django catalog is found.
+    """
+    import django
+    django_dir = os.path.normpath(os.path.join(os.path.dirname(django.__file__)))
+    if domain == 'djangojs':
+        domains = ('djangojs', 'django')
+    else:
+        domains = ('django',)
+    for domain in domains:
+        django_po = os.path.join(django_dir, 'conf', 'locale', locale, 'LC_MESSAGES', '%s.po' % domain)
+        if os.path.exists(django_po):
+            m = plural_forms_re.search(open(django_po, 'rU').read())
+            if m:
+                if verbosity > 1:
+                    sys.stderr.write("copying plural forms: %s\n" % m.group('value'))
+                lines = []
+                seen = False
+                # Insert the Plural-Forms value at the first blank line,
+                # i.e. immediately after the .po header block.
+                for line in msgs.split('\n'):
+                    if not line and not seen:
+                        line = '%s\n' % m.group('value')
+                        seen = True
+                    lines.append(line)
+                msgs = '\n'.join(lines)
+                break
+    return msgs
+
+
+def make_messages(locale=None, domain='django', verbosity='1', all=False,
+ extensions=None, symlinks=False, ignore_patterns=[]):
+ """
+ Uses the locale directory from the Django SVN tree or an application/
+ project to process all
+ """
+ # Need to ensure that the i18n framework is enabled
+ from google.appengine._internal.django.conf import settings
+ if settings.configured:
+ settings.USE_I18N = True
+ else:
+ settings.configure(USE_I18N = True)
+
+ from google.appengine._internal.django.utils.translation import templatize
+
+ invoked_for_django = False
+ if os.path.isdir(os.path.join('conf', 'locale')):
+ localedir = os.path.abspath(os.path.join('conf', 'locale'))
+ invoked_for_django = True
+ elif os.path.isdir('locale'):
+ localedir = os.path.abspath('locale')
+ else:
+ raise CommandError("This script should be run from the Django SVN tree or your project or app tree. If you did indeed run it from the SVN checkout or your project or application, maybe you are just missing the conf/locale (in the django tree) or locale (for project and application) directory? It is not created automatically, you have to create it by hand if you want to enable i18n for your project or application.")
+
+ if domain not in ('django', 'djangojs'):
+ raise CommandError("currently makemessages only supports domains 'django' and 'djangojs'")
+
+ if (locale is None and not all) or domain is None:
+ # backwards compatible error message
+ if not sys.argv[0].endswith("make-messages.py"):
+ message = "Type '%s help %s' for usage.\n" % (os.path.basename(sys.argv[0]), sys.argv[1])
+ else:
+ message = "usage: make-messages.py -l <language>\n or: make-messages.py -a\n"
+ raise CommandError(message)
+
+ # We require gettext version 0.15 or newer.
+ output = _popen('xgettext --version')[0]
+ match = re.search(r'(?P<major>\d+)\.(?P<minor>\d+)', output)
+ if match:
+ xversion = (int(match.group('major')), int(match.group('minor')))
+ if xversion < (0, 15):
+ raise CommandError("Django internationalization requires GNU gettext 0.15 or newer. You are using version %s, please upgrade your gettext toolset." % match.group())
+
+ languages = []
+ if locale is not None:
+ languages.append(locale)
+ elif all:
+ locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % localedir))
+ languages = [os.path.basename(l) for l in locale_dirs]
+
+ for locale in languages:
+ if verbosity > 0:
+ print "processing language", locale
+ basedir = os.path.join(localedir, locale, 'LC_MESSAGES')
+ if not os.path.isdir(basedir):
+ os.makedirs(basedir)
+
+ pofile = os.path.join(basedir, '%s.po' % domain)
+ potfile = os.path.join(basedir, '%s.pot' % domain)
+
+ if os.path.exists(potfile):
+ os.unlink(potfile)
+
+ for dirpath, file in find_files(".", ignore_patterns, verbosity, symlinks=symlinks):
+ file_base, file_ext = os.path.splitext(file)
+ if domain == 'djangojs' and file_ext in extensions:
+ if verbosity > 1:
+ sys.stdout.write('processing file %s in %s\n' % (file, dirpath))
+ src = open(os.path.join(dirpath, file), "rU").read()
+ src = pythonize_re.sub('\n#', src)
+ thefile = '%s.py' % file
+ f = open(os.path.join(dirpath, thefile), "w")
+ try:
+ f.write(src)
+ finally:
+ f.close()
+ cmd = 'xgettext -d %s -L Perl --keyword=gettext_noop --keyword=gettext_lazy --keyword=ngettext_lazy:1,2 --from-code UTF-8 -o - "%s"' % (domain, os.path.join(dirpath, thefile))
+ msgs, errors = _popen(cmd)
+ if errors:
+ raise CommandError("errors happened while running xgettext on %s\n%s" % (file, errors))
+ old = '#: '+os.path.join(dirpath, thefile)[2:]
+ new = '#: '+os.path.join(dirpath, file)[2:]
+ msgs = msgs.replace(old, new)
+ if os.path.exists(potfile):
+ # Strip the header
+ msgs = '\n'.join(dropwhile(len, msgs.split('\n')))
+ else:
+ msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8')
+ if msgs:
+ f = open(potfile, 'ab')
+ try:
+ f.write(msgs)
+ finally:
+ f.close()
+ os.unlink(os.path.join(dirpath, thefile))
+ elif domain == 'django' and (file_ext == '.py' or file_ext in extensions):
+ thefile = file
+ if file_ext in extensions:
+ src = open(os.path.join(dirpath, file), "rU").read()
+ thefile = '%s.py' % file
+ try:
+ f = open(os.path.join(dirpath, thefile), "w")
+ try:
+ f.write(templatize(src))
+ finally:
+ f.close()
+ except SyntaxError, msg:
+ msg = "%s (file: %s)" % (msg, os.path.join(dirpath, file))
+ raise SyntaxError(msg)
+ if verbosity > 1:
+ sys.stdout.write('processing file %s in %s\n' % (file, dirpath))
+ cmd = 'xgettext -d %s -L Python --keyword=gettext_noop --keyword=gettext_lazy --keyword=ngettext_lazy:1,2 --keyword=ugettext_noop --keyword=ugettext_lazy --keyword=ungettext_lazy:1,2 --from-code UTF-8 -o - "%s"' % (
+ domain, os.path.join(dirpath, thefile))
+ msgs, errors = _popen(cmd)
+ if errors:
+ raise CommandError("errors happened while running xgettext on %s\n%s" % (file, errors))
+
+ if thefile != file:
+ old = '#: '+os.path.join(dirpath, thefile)[2:]
+ new = '#: '+os.path.join(dirpath, file)[2:]
+ msgs = msgs.replace(old, new)
+ if os.path.exists(potfile):
+ # Strip the header
+ msgs = '\n'.join(dropwhile(len, msgs.split('\n')))
+ else:
+ msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8')
+ if msgs:
+ f = open(potfile, 'ab')
+ try:
+ f.write(msgs)
+ finally:
+ f.close()
+ if thefile != file:
+ os.unlink(os.path.join(dirpath, thefile))
+
+ if os.path.exists(potfile):
+ msgs, errors = _popen('msguniq --to-code=utf-8 "%s"' % potfile)
+ if errors:
+ raise CommandError("errors happened while running msguniq\n%s" % errors)
+ f = open(potfile, 'w')
+ try:
+ f.write(msgs)
+ finally:
+ f.close()
+ if os.path.exists(pofile):
+ msgs, errors = _popen('msgmerge -q "%s" "%s"' % (pofile, potfile))
+ if errors:
+ raise CommandError("errors happened while running msgmerge\n%s" % errors)
+ elif not invoked_for_django:
+ msgs = copy_plural_forms(msgs, locale, domain, verbosity)
+ f = open(pofile, 'wb')
+ try:
+ f.write(msgs)
+ finally:
+ f.close()
+ os.unlink(potfile)
+
+
class Command(BaseCommand):
    """Management-command front end for make_messages().

    Collects and validates command-line options, then delegates all real
    work to the module-level make_messages() function.
    """

    option_list = BaseCommand.option_list + (
        make_option('--locale', '-l', default=None, dest='locale',
            help='Creates or updates the message files only for the given locale (e.g. pt_BR).'),
        make_option('--domain', '-d', default='django', dest='domain',
            help='The domain of the message files (default: "django").'),
        make_option('--all', '-a', action='store_true', dest='all',
            default=False, help='Reexamines all source code and templates for new translation strings and updates all message files for all available languages.'),
        make_option('--extension', '-e', dest='extensions',
            help='The file extension(s) to examine (default: ".html", separate multiple extensions with commas, or use -e multiple times)',
            action='append'),
        make_option('--symlinks', '-s', action='store_true', dest='symlinks',
            default=False, help='Follows symlinks to directories when examining source code and templates for translation strings.'),
        make_option('--ignore', '-i', action='append', dest='ignore_patterns',
            default=[], metavar='PATTERN', help='Ignore files or directories matching this glob-style pattern. Use multiple times to ignore more.'),
        make_option('--no-default-ignore', action='store_false', dest='use_default_ignore_patterns',
            default=True, help="Don't ignore the common glob-style patterns 'CVS', '.*' and '*~'."),
    )
    help = "Runs over the entire source tree of the current directory and pulls out all strings marked for translation. It creates (or updates) a message file in the conf/locale (in the django tree) or locale (for project and application) directory."

    # Validation would import settings, which this command must not do.
    requires_model_validation = False
    can_import_settings = False

    def handle(self, *args, **options):
        """Validate the command line and hand everything to make_messages()."""
        if args:
            raise CommandError("Command doesn't accept any arguments")

        locale = options.get('locale')
        domain = options.get('domain')
        verbosity = int(options.get('verbosity'))
        process_all = options.get('all')
        extensions = options.get('extensions')
        symlinks = options.get('symlinks')

        # Merge user-supplied ignore patterns with the defaults and drop
        # duplicates.
        ignore_patterns = options.get('ignore_patterns')
        if options.get('use_default_ignore_patterns'):
            ignore_patterns += ['CVS', '.*', '*~']
        ignore_patterns = list(set(ignore_patterns))

        # The 'djangojs' domain scans JavaScript sources; every other domain
        # defaults to scanning template files.
        if domain == 'djangojs':
            default_exts = ['js']
        else:
            default_exts = ['html']
        extensions = handle_extensions(extensions or default_exts)

        if verbosity > 1:
            sys.stdout.write('examining files with the extensions: %s\n' % get_text_list(list(extensions), 'and'))

        make_messages(locale, domain, verbosity, process_all, extensions, symlinks, ignore_patterns)
diff --git a/google/appengine/_internal/django/core/management/commands/reset.py b/google/appengine/_internal/django/core/management/commands/reset.py
new file mode 100644
index 0000000..22777ac
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/reset.py
@@ -0,0 +1,57 @@
+from optparse import make_option
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.management.base import AppCommand, CommandError
+from google.appengine._internal.django.core.management.color import no_style
+from google.appengine._internal.django.core.management.sql import sql_reset
+from google.appengine._internal.django.db import connections, transaction, DEFAULT_DB_ALIAS
+
+class Command(AppCommand):
+ option_list = AppCommand.option_list + (
+ make_option('--noinput', action='store_false', dest='interactive', default=True,
+ help='Tells Django to NOT prompt the user for input of any kind.'),
+ make_option('--database', action='store', dest='database',
+ default=DEFAULT_DB_ALIAS, help='Nominates a database to reset. '
+ 'Defaults to the "default" database.'),
+ )
+ help = "Executes ``sqlreset`` for the given app(s) in the current database."
+ args = '[appname ...]'
+
+ output_transaction = True
+
+ def handle_app(self, app, **options):
+ using = options.get('database', DEFAULT_DB_ALIAS)
+ connection = connections[using]
+
+ app_name = app.__name__.split('.')[-2]
+ self.style = no_style()
+
+ sql_list = sql_reset(app, self.style, connection)
+
+ if options.get('interactive'):
+ confirm = raw_input("""
+You have requested a database reset.
+This will IRREVERSIBLY DESTROY any data for
+the "%s" application in the database "%s".
+Are you sure you want to do this?
+
+Type 'yes' to continue, or 'no' to cancel: """ % (app_name, connection.settings_dict['NAME']))
+ else:
+ confirm = 'yes'
+
+ if confirm == 'yes':
+ try:
+ cursor = connection.cursor()
+ for sql in sql_list:
+ cursor.execute(sql)
+ except Exception, e:
+ transaction.rollback_unless_managed()
+ raise CommandError("""Error: %s couldn't be reset. Possible reasons:
+ * The database isn't running or isn't configured correctly.
+ * At least one of the database tables doesn't exist.
+ * The SQL was invalid.
+Hint: Look at the output of 'django-admin.py sqlreset %s'. That's the SQL this command wasn't able to run.
+The full error: %s""" % (app_name, app_name, e))
+ transaction.commit_unless_managed()
+ else:
+ print "Reset cancelled."
diff --git a/google/appengine/_internal/django/core/management/commands/runfcgi.py b/google/appengine/_internal/django/core/management/commands/runfcgi.py
new file mode 100644
index 0000000..8c950af
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/runfcgi.py
@@ -0,0 +1,20 @@
+from google.appengine._internal.django.core.management.base import BaseCommand
+
class Command(BaseCommand):
    """Serves this project as a FastCGI application (requires flup)."""

    help = "Runs this project as a FastCGI application. Requires flup."
    args = '[various KEY=val options, use `runfcgi help` for help]'

    def handle(self, *args, **options):
        """Activate the configured language, then start the FastCGI server."""
        from google.appengine._internal.django.conf import settings
        from google.appengine._internal.django.utils import translation
        # Nothing later in the FastCGI path activates the language, so do it
        # here, up front.
        try:
            translation.activate(settings.LANGUAGE_CODE)
        except AttributeError:
            # Settings without a LANGUAGE_CODE are fine; just skip activation.
            pass
        from google.appengine._internal.django.core.servers.fastcgi import runfastcgi
        runfastcgi(args)

    def usage(self, subcommand):
        """Return the FastCGI server's own detailed help text."""
        from google.appengine._internal.django.core.servers.fastcgi import FASTCGI_HELP
        return FASTCGI_HELP
diff --git a/google/appengine/_internal/django/core/management/commands/runserver.py b/google/appengine/_internal/django/core/management/commands/runserver.py
new file mode 100644
index 0000000..ed6326e
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/runserver.py
@@ -0,0 +1,84 @@
+from google.appengine._internal.django.core.management.base import BaseCommand, CommandError
+from optparse import make_option
+import os
+import sys
+
+class Command(BaseCommand):
+ option_list = BaseCommand.option_list + (
+ make_option('--noreload', action='store_false', dest='use_reloader', default=True,
+ help='Tells Django to NOT use the auto-reloader.'),
+ make_option('--adminmedia', dest='admin_media_path', default='',
+ help='Specifies the directory from which to serve admin media.'),
+ )
+ help = "Starts a lightweight Web server for development."
+ args = '[optional port number, or ipaddr:port]'
+
+ # Validation is called explicitly each time the server is reloaded.
+ requires_model_validation = False
+
+ def handle(self, addrport='', *args, **options):
+ import django
+ from google.appengine._internal.django.core.servers.basehttp import run, AdminMediaHandler, WSGIServerException
+ from google.appengine._internal.django.core.handlers.wsgi import WSGIHandler
+ if args:
+ raise CommandError('Usage is runserver %s' % self.args)
+ if not addrport:
+ addr = ''
+ port = '8000'
+ else:
+ try:
+ addr, port = addrport.split(':')
+ except ValueError:
+ addr, port = '', addrport
+ if not addr:
+ addr = '127.0.0.1'
+
+ if not port.isdigit():
+ raise CommandError("%r is not a valid port number." % port)
+
+ use_reloader = options.get('use_reloader', True)
+ admin_media_path = options.get('admin_media_path', '')
+ shutdown_message = options.get('shutdown_message', '')
+ quit_command = (sys.platform == 'win32') and 'CTRL-BREAK' or 'CONTROL-C'
+
+ def inner_run():
+ from google.appengine._internal.django.conf import settings
+ from google.appengine._internal.django.utils import translation
+ print "Validating models..."
+ self.validate(display_num_errors=True)
+ print "\nDjango version %s, using settings %r" % (django.get_version(), settings.SETTINGS_MODULE)
+ print "Development server is running at http://%s:%s/" % (addr, port)
+ print "Quit the server with %s." % quit_command
+
+ # django.core.management.base forces the locale to en-us. We should
+ # set it up correctly for the first request (particularly important
+ # in the "--noreload" case).
+ translation.activate(settings.LANGUAGE_CODE)
+
+ try:
+ handler = AdminMediaHandler(WSGIHandler(), admin_media_path)
+ run(addr, int(port), handler)
+ except WSGIServerException, e:
+ # Use helpful error messages instead of ugly tracebacks.
+ ERRORS = {
+ 13: "You don't have permission to access that port.",
+ 98: "That port is already in use.",
+ 99: "That IP address can't be assigned-to.",
+ }
+ try:
+ error_text = ERRORS[e.args[0].args[0]]
+ except (AttributeError, KeyError):
+ error_text = str(e)
+ sys.stderr.write(self.style.ERROR("Error: %s" % error_text) + '\n')
+ # Need to use an OS exit because sys.exit doesn't work in a thread
+ os._exit(1)
+ except KeyboardInterrupt:
+ if shutdown_message:
+ print shutdown_message
+ sys.exit(0)
+
+ if use_reloader:
+ from google.appengine._internal.django.utils import autoreload
+ autoreload.main(inner_run)
+ else:
+ inner_run()
diff --git a/google/appengine/_internal/django/core/management/commands/shell.py b/google/appengine/_internal/django/core/management/commands/shell.py
new file mode 100644
index 0000000..60bfe87
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/shell.py
@@ -0,0 +1,69 @@
+import os
+from google.appengine._internal.django.core.management.base import NoArgsCommand
+from optparse import make_option
+
class Command(NoArgsCommand):
    """Runs an interactive Python interpreter, preferring IPython."""

    option_list = NoArgsCommand.option_list + (
        make_option('--plain', action='store_true', dest='plain',
            help='Tells Django to use plain Python, not IPython.'),
    )
    help = "Runs a Python interactive interpreter. Tries to use IPython, if it's available."

    requires_model_validation = False

    def handle_noargs(self, **options):
        """Start IPython if installed (and not --plain), else plain Python."""
        # Workaround for ticket #1796: force every installed app's models to
        # load before the shell starts.
        from google.appengine._internal.django.db.models.loading import get_models
        loaded_models = get_models()

        plain = options.get('plain', False)

        try:
            if plain:
                # The user asked for plain Python; skip IPython entirely by
                # jumping straight to the fallback handler below.
                raise ImportError
            try:
                # IPython >= 0.11 entry point.
                from IPython.frontend.terminal.embed import TerminalInteractiveShell
                shell = TerminalInteractiveShell()
                shell.mainloop()
            except ImportError:
                # IPython < 0.11. Pass an empty argv explicitly so IPython
                # doesn't pick up this command's own sys.argv.
                try:
                    from IPython.Shell import IPShell
                    shell = IPShell(argv=[])
                    shell.mainloop()
                except ImportError:
                    # No IPython at all; fall through to plain Python.
                    raise
        except ImportError:
            import code
            # Namespace for the interactive session. rlcompleter completes
            # against it, so tab completion works on objects imported at
            # runtime (ticket #5082).
            imported_objects = {}
            try:
                # rlcompleter is only useful when readline is available.
                import readline
            except ImportError:
                pass
            else:
                # readline imported fine, so rlcompleter is safe to use.
                import rlcompleter
                readline.set_completer(rlcompleter.Completer(imported_objects).complete)
                readline.parse_and_bind("tab:complete")

            if not plain:
                # Honor $PYTHONSTARTUP and .pythonrc.py like the standard
                # interactive interpreter: $PYTHONSTARTUP first, then the
                # 'user' module (which imports .pythonrc.py as a side effect).
                pythonrc = os.environ.get("PYTHONSTARTUP")
                if pythonrc and os.path.isfile(pythonrc):
                    try:
                        execfile(pythonrc)
                    except NameError:
                        pass
                import user
            code.interact(local=imported_objects)
diff --git a/google/appengine/_internal/django/core/management/commands/sql.py b/google/appengine/_internal/django/core/management/commands/sql.py
new file mode 100644
index 0000000..721792b
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/sql.py
@@ -0,0 +1,19 @@
+from optparse import make_option
+
+from google.appengine._internal.django.core.management.base import AppCommand
+from google.appengine._internal.django.core.management.sql import sql_create
+from google.appengine._internal.django.db import connections, DEFAULT_DB_ALIAS
+
class Command(AppCommand):
    """Prints the table-creation SQL for the given app(s)."""

    help = "Prints the CREATE TABLE SQL statements for the given app name(s)."

    option_list = AppCommand.option_list + (
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
                'SQL for. Defaults to the "default" database.'),
    )

    output_transaction = True

    def handle_app(self, app, **options):
        """Return the app's CREATE TABLE statements as UTF-8 bytes."""
        connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
        statements = sql_create(app, self.style, connection)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/google/appengine/_internal/django/core/management/commands/sqlall.py b/google/appengine/_internal/django/core/management/commands/sqlall.py
new file mode 100644
index 0000000..2d8ebf1
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/sqlall.py
@@ -0,0 +1,19 @@
+from optparse import make_option
+
+from google.appengine._internal.django.core.management.base import AppCommand
+from google.appengine._internal.django.core.management.sql import sql_all
+from google.appengine._internal.django.db import connections, DEFAULT_DB_ALIAS
+
class Command(AppCommand):
    """Prints the complete schema SQL (tables, custom SQL, indexes)."""

    help = "Prints the CREATE TABLE, custom SQL and CREATE INDEX SQL statements for the given model module name(s)."

    option_list = AppCommand.option_list + (
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
                'SQL for. Defaults to the "default" database.'),
    )

    output_transaction = True

    def handle_app(self, app, **options):
        """Return all schema statements for the app as UTF-8 bytes."""
        connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
        statements = sql_all(app, self.style, connection)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/google/appengine/_internal/django/core/management/commands/sqlclear.py b/google/appengine/_internal/django/core/management/commands/sqlclear.py
new file mode 100644
index 0000000..48cd7ed
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/sqlclear.py
@@ -0,0 +1,19 @@
+from optparse import make_option
+
+from google.appengine._internal.django.core.management.base import AppCommand
+from google.appengine._internal.django.core.management.sql import sql_delete
+from google.appengine._internal.django.db import connections, DEFAULT_DB_ALIAS
+
class Command(AppCommand):
    """Prints the table-dropping SQL for the given app(s)."""

    help = "Prints the DROP TABLE SQL statements for the given app name(s)."

    option_list = AppCommand.option_list + (
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
                'SQL for. Defaults to the "default" database.'),
    )

    output_transaction = True

    def handle_app(self, app, **options):
        """Return the app's DROP TABLE statements as UTF-8 bytes."""
        connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
        statements = sql_delete(app, self.style, connection)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/google/appengine/_internal/django/core/management/commands/sqlcustom.py b/google/appengine/_internal/django/core/management/commands/sqlcustom.py
new file mode 100644
index 0000000..d4c9eb0
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/sqlcustom.py
@@ -0,0 +1,19 @@
+from optparse import make_option
+
+from google.appengine._internal.django.core.management.base import AppCommand
+from google.appengine._internal.django.core.management.sql import sql_custom
+from google.appengine._internal.django.db import connections, DEFAULT_DB_ALIAS
+
class Command(AppCommand):
    """Prints the app's hand-written (custom) SQL."""

    help = "Prints the custom table modifying SQL statements for the given app name(s)."

    option_list = AppCommand.option_list + (
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
                'SQL for. Defaults to the "default" database.'),
    )

    output_transaction = True

    def handle_app(self, app, **options):
        """Return the app's custom SQL statements as UTF-8 bytes."""
        connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
        statements = sql_custom(app, self.style, connection)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/google/appengine/_internal/django/core/management/commands/sqlflush.py b/google/appengine/_internal/django/core/management/commands/sqlflush.py
new file mode 100644
index 0000000..a7da269
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/sqlflush.py
@@ -0,0 +1,19 @@
+from optparse import make_option
+
+from google.appengine._internal.django.core.management.base import NoArgsCommand
+from google.appengine._internal.django.core.management.sql import sql_flush
+from google.appengine._internal.django.db import connections, DEFAULT_DB_ALIAS
+
class Command(NoArgsCommand):
    """Prints the SQL that empties all Django-managed tables."""

    help = "Returns a list of the SQL statements required to return all tables in the database to the state they were in just after they were installed."

    option_list = NoArgsCommand.option_list + (
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
                'SQL for. Defaults to the "default" database.'),
    )

    output_transaction = True

    def handle_noargs(self, **options):
        """Return the flush statements for Django-managed tables as UTF-8 bytes."""
        connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
        statements = sql_flush(self.style, connection, only_django=True)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/google/appengine/_internal/django/core/management/commands/sqlindexes.py b/google/appengine/_internal/django/core/management/commands/sqlindexes.py
new file mode 100644
index 0000000..8975f81
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/sqlindexes.py
@@ -0,0 +1,20 @@
+from optparse import make_option
+
+from google.appengine._internal.django.core.management.base import AppCommand
+from google.appengine._internal.django.core.management.sql import sql_indexes
+from google.appengine._internal.django.db import connections, DEFAULT_DB_ALIAS
+
class Command(AppCommand):
    """Prints the index-creation SQL for the given app(s)."""

    help = "Prints the CREATE INDEX SQL statements for the given model module name(s)."

    option_list = AppCommand.option_list + (
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
                'SQL for. Defaults to the "default" database.'),
    )

    output_transaction = True

    def handle_app(self, app, **options):
        """Return the app's CREATE INDEX statements as UTF-8 bytes."""
        connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
        statements = sql_indexes(app, self.style, connection)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/google/appengine/_internal/django/core/management/commands/sqlinitialdata.py b/google/appengine/_internal/django/core/management/commands/sqlinitialdata.py
new file mode 100644
index 0000000..ed8f6ed
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/sqlinitialdata.py
@@ -0,0 +1,7 @@
+from google.appengine._internal.django.core.management.base import AppCommand, CommandError
+
class Command(AppCommand):
    """Stub left behind after this command was renamed to 'sqlcustom'."""

    help = "RENAMED: see 'sqlcustom'"

    def handle(self, *app_labels, **options):
        """Always fail, pointing the user at the new command name."""
        raise CommandError("This command has been renamed. Use the 'sqlcustom' command instead.")
diff --git a/google/appengine/_internal/django/core/management/commands/sqlreset.py b/google/appengine/_internal/django/core/management/commands/sqlreset.py
new file mode 100644
index 0000000..94ee3d2
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/sqlreset.py
@@ -0,0 +1,20 @@
+from optparse import make_option
+
+from google.appengine._internal.django.core.management.base import AppCommand
+from google.appengine._internal.django.core.management.sql import sql_reset
+from google.appengine._internal.django.db import connections, DEFAULT_DB_ALIAS
+
class Command(AppCommand):
    """Prints DROP TABLE followed by CREATE TABLE SQL for the given app(s)."""

    help = "Prints the DROP TABLE SQL, then the CREATE TABLE SQL, for the given app name(s)."

    option_list = AppCommand.option_list + (
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
                'SQL for. Defaults to the "default" database.'),
    )

    output_transaction = True

    def handle_app(self, app, **options):
        """Return the app's drop-then-create statements as UTF-8 bytes."""
        connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
        statements = sql_reset(app, self.style, connection)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/google/appengine/_internal/django/core/management/commands/sqlsequencereset.py b/google/appengine/_internal/django/core/management/commands/sqlsequencereset.py
new file mode 100644
index 0000000..a58e797
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/sqlsequencereset.py
@@ -0,0 +1,20 @@
+from optparse import make_option
+
+from google.appengine._internal.django.core.management.base import AppCommand
+from google.appengine._internal.django.db import connections, models, DEFAULT_DB_ALIAS
+
class Command(AppCommand):
    """Prints the sequence-reset SQL for the given app(s)."""

    help = 'Prints the SQL statements for resetting sequences for the given app name(s).'

    option_list = AppCommand.option_list + (
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
                'SQL for. Defaults to the "default" database.'),
    )

    output_transaction = True

    def handle_app(self, app, **options):
        """Return the backend's sequence-reset statements as UTF-8 bytes."""
        connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
        # Include auto-created tables (e.g. M2M through tables) so their
        # sequences get reset too.
        app_models = models.get_models(app, include_auto_created=True)
        statements = connection.ops.sequence_reset_sql(self.style, app_models)
        return u'\n'.join(statements).encode('utf-8')
diff --git a/google/appengine/_internal/django/core/management/commands/startapp.py b/google/appengine/_internal/django/core/management/commands/startapp.py
new file mode 100644
index 0000000..df1576f
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/startapp.py
@@ -0,0 +1,47 @@
+import os
+
+from google.appengine._internal.django.core.management.base import copy_helper, CommandError, LabelCommand
+from google.appengine._internal.django.utils.importlib import import_module
+
class Command(LabelCommand):
    """Creates the skeleton directory structure for a new Django app."""

    help = "Creates a Django app directory structure for the given app name in the current directory."
    args = "[appname]"
    label = 'application name'

    requires_model_validation = False
    # Can't import settings during this command, because they haven't
    # necessarily been created.
    can_import_settings = False

    def handle_label(self, app_name, directory=None, **options):
        """Create the app skeleton under *directory* (default: cwd)."""
        if directory is None:
            directory = os.getcwd()

        # The project name is simply the basename of the target directory
        # (the project directory, or the current directory when none given).
        project_name = os.path.basename(directory)
        if app_name == project_name:
            raise CommandError("You cannot create an app with the same name"
                               " (%r) as your project." % app_name)

        # Refuse names that shadow a module that is already importable.
        try:
            import_module(app_name)
        except ImportError:
            pass
        else:
            raise CommandError("%r conflicts with the name of an existing Python module and cannot be used as an app name. Please try another name." % app_name)

        copy_helper(self.style, 'app', app_name, directory, project_name)
+
class ProjectCommand(Command):
    """startapp variant that always targets a fixed project directory."""

    help = ("Creates a Django app directory structure for the given app name"
            " in this project's directory.")

    def __init__(self, project_directory):
        super(ProjectCommand, self).__init__()
        # All apps created through this command live in this directory.
        self.project_directory = project_directory

    def handle_label(self, app_name, **options):
        """Delegate to Command.handle_label with the bound project directory."""
        super(ProjectCommand, self).handle_label(app_name, self.project_directory, **options)
diff --git a/google/appengine/_internal/django/core/management/commands/startproject.py b/google/appengine/_internal/django/core/management/commands/startproject.py
new file mode 100644
index 0000000..a10a923
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/startproject.py
@@ -0,0 +1,39 @@
+from google.appengine._internal.django.core.management.base import copy_helper, CommandError, LabelCommand
+from google.appengine._internal.django.utils.importlib import import_module
+import os
+import re
+from random import choice
+
class Command(LabelCommand):
    """Creates the skeleton directory structure for a new Django project."""

    help = "Creates a Django project directory structure for the given project name in the current directory."
    args = "[projectname]"
    label = 'project name'

    requires_model_validation = False
    # Can't import settings during this command, because they haven't
    # necessarily been created.
    can_import_settings = False

    def handle_label(self, project_name, **options):
        """Create the project skeleton and seed settings.py with a SECRET_KEY.

        Raises CommandError when *project_name* would shadow an importable
        Python module.
        """
        # Determine the target directory naively -- the current directory.
        directory = os.getcwd()

        # Refuse names that shadow a module that is already importable.
        try:
            import_module(project_name)
        except ImportError:
            pass
        else:
            raise CommandError("%r conflicts with the name of an existing Python module and cannot be used as a project name. Please try another name." % project_name)

        copy_helper(self.style, 'project', project_name, directory)

        # Create a random SECRET_KEY and put it in the main settings.
        # Prefer the OS-level RNG: SECRET_KEY is security-sensitive, so the
        # default Mersenne Twister PRNG is not appropriate. Fall back to it
        # only on platforms without os.urandom().
        import random
        chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
        try:
            rand = random.SystemRandom()
            secret_key = ''.join([rand.choice(chars) for i in range(50)])
        except NotImplementedError:
            secret_key = ''.join([choice(chars) for i in range(50)])

        main_settings_file = os.path.join(directory, project_name, 'settings.py')
        # Close the read handle before reopening the file for writing (the
        # original code leaked both handles on the error path).
        settings_file = open(main_settings_file, 'r')
        try:
            settings_contents = settings_file.read()
        finally:
            settings_file.close()
        settings_contents = re.sub(r"(?<=SECRET_KEY = ')'", secret_key + "'", settings_contents)
        fp = open(main_settings_file, 'w')
        try:
            fp.write(settings_contents)
        finally:
            fp.close()
diff --git a/google/appengine/_internal/django/core/management/commands/syncdb.py b/google/appengine/_internal/django/core/management/commands/syncdb.py
new file mode 100644
index 0000000..b925eb0
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/syncdb.py
@@ -0,0 +1,156 @@
+from optparse import make_option
+import sys
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.management.base import NoArgsCommand
+from google.appengine._internal.django.core.management.color import no_style
+from google.appengine._internal.django.core.management.sql import custom_sql_for_model, emit_post_sync_signal
+from google.appengine._internal.django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS
+from google.appengine._internal.django.utils.datastructures import SortedDict
+from google.appengine._internal.django.utils.importlib import import_module
+
+
class Command(NoArgsCommand):
    """Implements "syncdb": creates missing database tables for all of
    INSTALLED_APPS, runs custom SQL and index SQL for newly created models,
    then loads the initial_data fixtures.
    """
    option_list = NoArgsCommand.option_list + (
        make_option('--noinput', action='store_false', dest='interactive', default=True,
            help='Tells Django to NOT prompt the user for input of any kind.'),
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a database to synchronize. '
                'Defaults to the "default" database.'),
    )
    help = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."

    def handle_noargs(self, **options):
        """Synchronize the chosen database with the installed models."""
        verbosity = int(options.get('verbosity', 1))
        interactive = options.get('interactive')
        show_traceback = options.get('traceback', False)

        # Stealth option -- 'load_initial_data' is used by the testing setup
        # process to disable initial fixture loading.
        load_initial_data = options.get('load_initial_data', True)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError, exc:
                # This is slightly hackish. We want to ignore ImportErrors
                # if the "management" module itself is missing -- but we don't
                # want to ignore the exception if the management module exists
                # but raises an ImportError for some reason. The only way we
                # can do this is to check the text of the exception. Note that
                # we're a bit broad in how we check the text, because different
                # Python implementations may not use the same text.
                # CPython uses the text "No module named management"
                # PyPy uses "No module named myproject.myapp.management"
                msg = exc.args[0]
                if not msg.startswith('No module named') or 'management' not in msg:
                    raise

        db = options.get('database', DEFAULT_DB_ALIAS)
        connection = connections[db]
        cursor = connection.cursor()

        # Get a list of already installed *models* so that references work right.
        tables = connection.introspection.table_names()
        seen_models = connection.introspection.installed_models(tables)
        created_models = set()
        pending_references = {}

        # Build the manifest of apps and models that are to be synchronized
        all_models = [
            (app.__name__.split('.')[-2],
                [m for m in models.get_models(app, include_auto_created=True)
                if router.allow_syncdb(db, m)])
            for app in models.get_apps()
        ]
        def model_installed(model):
            # True when neither the model's table nor its auto-created
            # parent's table already exists in the database.
            opts = model._meta
            converter = connection.introspection.table_name_converter
            return not ((converter(opts.db_table) in tables) or
                (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables))

        manifest = SortedDict(
            (app_name, filter(model_installed, model_list))
            for app_name, model_list in all_models
        )

        # Create the tables for each model
        for app_name, model_list in manifest.items():
            for model in model_list:
                # Create the model's database table, if it doesn't already exist.
                if verbosity >= 2:
                    print "Processing %s.%s model" % (app_name, model._meta.object_name)
                sql, references = connection.creation.sql_create_model(model, self.style, seen_models)
                seen_models.add(model)
                created_models.add(model)
                for refto, refs in references.items():
                    pending_references.setdefault(refto, []).extend(refs)
                    if refto in seen_models:
                        sql.extend(connection.creation.sql_for_pending_references(refto, self.style, pending_references))
                sql.extend(connection.creation.sql_for_pending_references(model, self.style, pending_references))
                if verbosity >= 1 and sql:
                    print "Creating table %s" % model._meta.db_table
                for statement in sql:
                    cursor.execute(statement)
                tables.append(connection.introspection.table_name_converter(model._meta.db_table))


        transaction.commit_unless_managed(using=db)

        # Send the post_syncdb signal, so individual apps can do whatever they need
        # to do at this point.
        emit_post_sync_signal(created_models, verbosity, interactive, db)

        # The connection may have been closed by a syncdb handler.
        cursor = connection.cursor()

        # Install custom SQL for the app (but only if this
        # is a model we've just created)
        for app_name, model_list in manifest.items():
            for model in model_list:
                if model in created_models:
                    custom_sql = custom_sql_for_model(model, self.style, connection)
                    if custom_sql:
                        if verbosity >= 1:
                            print "Installing custom SQL for %s.%s model" % (app_name, model._meta.object_name)
                        try:
                            for sql in custom_sql:
                                cursor.execute(sql)
                        except Exception, e:
                            # Custom SQL failure is reported but non-fatal;
                            # the transaction is rolled back for this model.
                            sys.stderr.write("Failed to install custom SQL for %s.%s model: %s\n" % (app_name, model._meta.object_name, e))
                            if show_traceback:
                                import traceback
                                traceback.print_exc()
                            transaction.rollback_unless_managed(using=db)
                        else:
                            transaction.commit_unless_managed(using=db)
                    else:
                        if verbosity >= 2:
                            print "No custom SQL for %s.%s model" % (app_name, model._meta.object_name)

        # Install SQL indices for all newly created models
        for app_name, model_list in manifest.items():
            for model in model_list:
                if model in created_models:
                    index_sql = connection.creation.sql_indexes_for_model(model, self.style)
                    if index_sql:
                        if verbosity >= 1:
                            print "Installing index for %s.%s model" % (app_name, model._meta.object_name)
                        try:
                            for sql in index_sql:
                                cursor.execute(sql)
                        except Exception, e:
                            sys.stderr.write("Failed to install index for %s.%s model: %s\n" % (app_name, model._meta.object_name, e))
                            transaction.rollback_unless_managed(using=db)
                        else:
                            transaction.commit_unless_managed(using=db)

        # Load initial_data fixtures (unless that has been disabled)
        if load_initial_data:
            from google.appengine._internal.django.core.management import call_command
            call_command('loaddata', 'initial_data', verbosity=verbosity, database=db)
diff --git a/google/appengine/_internal/django/core/management/commands/test.py b/google/appengine/_internal/django/core/management/commands/test.py
new file mode 100644
index 0000000..582d074
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/test.py
@@ -0,0 +1,40 @@
+from google.appengine._internal.django.core.management.base import BaseCommand
+from optparse import make_option
+import sys
+
class Command(BaseCommand):
    """Implements "test": runs the configured test runner over the given
    app labels (or the whole project when none are given).
    """
    option_list = BaseCommand.option_list + (
        make_option('--noinput', action='store_false', dest='interactive', default=True,
            help='Tells Django to NOT prompt the user for input of any kind.'),
        make_option('--failfast', action='store_true', dest='failfast', default=False,
            help='Tells Django to stop running the test suite after first failed test.')
    )
    help = 'Runs the test suite for the specified applications, or the entire site if no apps are specified.'
    args = '[appname ...]'

    requires_model_validation = False

    def handle(self, *test_labels, **options):
        """Resolve the test runner from settings and run it; exits with a
        non-zero status code when any test fails."""
        from google.appengine._internal.django.conf import settings
        from google.appengine._internal.django.test.utils import get_runner

        verbosity = int(options.get('verbosity', 1))
        interactive = options.get('interactive', True)
        failfast = options.get('failfast', False)
        TestRunner = get_runner(settings)

        # 'func_name' only exists on plain functions, so its presence
        # distinguishes legacy function-style runners from runner classes.
        if hasattr(TestRunner, 'func_name'):
            # Pre 1.2 test runners were just functions,
            # and did not support the 'failfast' option.
            import warnings
            warnings.warn(
                'Function-based test runners are deprecated. Test runners should be classes with a run_tests() method.',
                PendingDeprecationWarning
            )
            failures = TestRunner(test_labels, verbosity=verbosity, interactive=interactive)
        else:
            test_runner = TestRunner(verbosity=verbosity, interactive=interactive, failfast=failfast)
            failures = test_runner.run_tests(test_labels)

        if failures:
            # bool(failures) collapses any failure count to exit status 1.
            sys.exit(bool(failures))
diff --git a/google/appengine/_internal/django/core/management/commands/testserver.py b/google/appengine/_internal/django/core/management/commands/testserver.py
new file mode 100644
index 0000000..7f073c3
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/testserver.py
@@ -0,0 +1,33 @@
+from google.appengine._internal.django.core.management.base import BaseCommand
+
+from optparse import make_option
+
class Command(BaseCommand):
    """Implements "testserver": creates a test database, loads the given
    fixtures into it, then serves it with the development server.
    """
    option_list = BaseCommand.option_list + (
        make_option('--addrport', action='store', dest='addrport',
            type='string', default='',
            help='port number or ipaddr:port to run the server on'),
    )
    help = 'Runs a development server with data from the given fixture(s).'
    args = '[fixture ...]'

    requires_model_validation = False

    def handle(self, *fixture_labels, **options):
        """Create the test DB, load fixtures, then hand off to runserver."""
        from google.appengine._internal.django.core.management import call_command
        from google.appengine._internal.django.db import connection

        verbosity = int(options.get('verbosity', 1))
        addrport = options.get('addrport')

        # Create a test database.
        db_name = connection.creation.create_test_db(verbosity=verbosity)

        # Import the fixture data into the test database.
        call_command('loaddata', *fixture_labels, **{'verbosity': verbosity})

        # Run the development server. Turn off auto-reloading because it causes
        # a strange error -- it causes this handle() method to be called
        # multiple times.
        shutdown_message = '\nServer stopped.\nNote that the test database, %r, has not been deleted. You can explore it on your own.' % db_name
        call_command('runserver', addrport=addrport, shutdown_message=shutdown_message, use_reloader=False)
diff --git a/google/appengine/_internal/django/core/management/commands/validate.py b/google/appengine/_internal/django/core/management/commands/validate.py
new file mode 100644
index 0000000..eeec7a2
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/commands/validate.py
@@ -0,0 +1,9 @@
+from google.appengine._internal.django.core.management.base import NoArgsCommand
+
class Command(NoArgsCommand):
    """Implements "validate": validate all installed models and report the
    number of errors found."""
    help = "Validates all installed models."

    # This command *is* the validation step, so the automatic pre-command
    # validation pass is skipped.
    requires_model_validation = False

    def handle_noargs(self, **options):
        # validate() is inherited from the management base command
        # (not visible in this file).
        self.validate(display_num_errors=True)
diff --git a/google/appengine/_internal/django/core/management/sql.py b/google/appengine/_internal/django/core/management/sql.py
new file mode 100644
index 0000000..ab287ae
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/sql.py
@@ -0,0 +1,182 @@
+import os
+import re
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.management.base import CommandError
+from google.appengine._internal.django.db import models
+from google.appengine._internal.django.db.models import get_models
+
def sql_create(app, style, connection):
    "Returns a list of the CREATE TABLE SQL statements for the given app."

    if connection.settings_dict['ENGINE'] == 'django.db.backends.dummy':
        # This must be the "dummy" database backend, which means the user
        # hasn't set ENGINE for the database.
        # Fix: the message previously said "DATBASES"; the actual Django
        # setting is DATABASES.
        raise CommandError("Django doesn't know which syntax to use for your SQL statements,\n" +
            "because you haven't specified the ENGINE setting for the database.\n" +
            "Edit your settings file and change DATABASES['default']['ENGINE'] to something like\n" +
            "'django.db.backends.postgresql' or 'django.db.backends.mysql'.")

    # Get installed models, so we generate REFERENCES right.
    # We trim models from the current app so that the sqlreset command does not
    # generate invalid SQL (leaving models out of known_models is harmless, so
    # we can be conservative).
    app_models = models.get_models(app, include_auto_created=True)
    final_output = []
    tables = connection.introspection.table_names()
    known_models = set([model for model in connection.introspection.installed_models(tables) if model not in app_models])
    pending_references = {}

    for model in app_models:
        output, references = connection.creation.sql_create_model(model, style, known_models)
        final_output.extend(output)
        for refto, refs in references.items():
            pending_references.setdefault(refto, []).extend(refs)
            if refto in known_models:
                final_output.extend(connection.creation.sql_for_pending_references(refto, style, pending_references))
        final_output.extend(connection.creation.sql_for_pending_references(model, style, pending_references))
        # Keep track of the fact that we've created the table for this model.
        known_models.add(model)

    # Handle references to tables that are from other apps
    # but don't exist physically.
    not_installed_models = set(pending_references.keys())
    if not_installed_models:
        alter_sql = []
        for model in not_installed_models:
            # Emit these as SQL comments, since they cannot be executed yet.
            alter_sql.extend(['-- ' + sql for sql in
                connection.creation.sql_for_pending_references(model, style, pending_references)])
        if alter_sql:
            final_output.append('-- The following references should be added but depend on non-existent tables:')
            final_output.extend(alter_sql)

    return final_output
+
def sql_delete(app, style, connection):
    "Returns a list of the DROP TABLE SQL statements for the given app."

    # This should work even if a connection isn't available
    try:
        cursor = connection.cursor()
    except:
        # NOTE: deliberately broad -- if we cannot connect at all we still
        # want to emit (empty) output rather than crash.
        cursor = None

    # Figure out which tables already exist
    if cursor:
        table_names = connection.introspection.get_table_list(cursor)
    else:
        table_names = []

    output = []

    # Output DROP TABLE statements for standard application tables.
    to_delete = set()

    # Maps a referenced model to the list of (model, field) pairs that
    # reference it; consumed by sql_destroy_model below.
    references_to_delete = {}
    app_models = models.get_models(app, include_auto_created=True)
    for model in app_models:
        if cursor and connection.introspection.table_name_converter(model._meta.db_table) in table_names:
            # The table exists, so it needs to be dropped
            opts = model._meta
            for f in opts.local_fields:
                if f.rel and f.rel.to not in to_delete:
                    references_to_delete.setdefault(f.rel.to, []).append( (model, f) )

            to_delete.add(model)

    for model in app_models:
        if connection.introspection.table_name_converter(model._meta.db_table) in table_names:
            output.extend(connection.creation.sql_destroy_model(model, references_to_delete, style))

    # Close database connection explicitly, in case this output is being piped
    # directly into a database client, to avoid locking issues.
    if cursor:
        cursor.close()
        connection.close()

    return output[::-1] # Reverse it, to deal with table dependencies.
+
def sql_reset(app, style, connection):
    """Return the DROP TABLE statements followed by the full CREATE
    statements (tables, custom SQL, indexes) for the given app."""
    drop_statements = sql_delete(app, style, connection)
    create_statements = sql_all(app, style, connection)
    return drop_statements + create_statements
+
def sql_flush(style, connection, only_django=False):
    """
    Returns a list of the SQL statements used to flush the database.

    If only_django is True, then only table names that have associated Django
    models and are in INSTALLED_APPS will be included.
    """
    introspection = connection.introspection
    if only_django:
        # Restrict to tables backed by installed Django models.
        target_tables = introspection.django_table_names(only_existing=True)
    else:
        target_tables = introspection.table_names()
    sequences = introspection.sequence_list()
    return connection.ops.sql_flush(style, target_tables, sequences)
+
def sql_custom(app, style, connection):
    "Returns a list of the custom table modifying SQL statements for the given app."
    output = []

    # Note: custom_sql_for_model() locates each app's sql/ directory itself,
    # so the previously computed (and unused) app_dir local was removed.
    for model in get_models(app):
        output.extend(custom_sql_for_model(model, style, connection))

    return output
+
def sql_indexes(app, style, connection):
    """Return the CREATE INDEX statements for every model in the app."""
    return [statement
            for model in models.get_models(app)
            for statement in connection.creation.sql_indexes_for_model(model, style)]
+
def sql_all(app, style, connection):
    """Return CREATE TABLE SQL, then custom SQL, then CREATE INDEX SQL for
    the given app, in that order."""
    statements = []
    for builder in (sql_create, sql_custom, sql_indexes):
        statements.extend(builder(app, style, connection))
    return statements
+
def custom_sql_for_model(model, style, connection):
    """Return the list of custom SQL statements for *model*, gathered from
    field post_create_sql hooks and from the app's sql/ directory."""
    opts = model._meta
    app_dir = os.path.normpath(os.path.join(os.path.dirname(models.get_app(model._meta.app_label).__file__), 'sql'))
    output = []

    # Post-creation SQL should come before any initial SQL data is loaded.
    # However, this should not be done for models that are unmanaged or
    # for fields that are part of a parent model (via model inheritance).
    if opts.managed:
        post_sql_fields = [f for f in opts.local_fields if hasattr(f, 'post_create_sql')]
        for f in post_sql_fields:
            output.extend(f.post_create_sql(style, model._meta.db_table))

    # Some backends can't execute more than one SQL statement at a time,
    # so split into separate statements.
    statements = re.compile(r";[ \t]*$", re.M)

    # Find custom SQL, if it's available.
    # The backend-specific "<model>.<backend>.sql" file is read before the
    # generic "<model>.sql" file.
    backend_name = connection.settings_dict['ENGINE'].split('.')[-1]
    sql_files = [os.path.join(app_dir, "%s.%s.sql" % (opts.object_name.lower(), backend_name)),
                 os.path.join(app_dir, "%s.sql" % opts.object_name.lower())]
    for sql_file in sql_files:
        if os.path.exists(sql_file):
            fp = open(sql_file, 'U')
            for statement in statements.split(fp.read().decode(settings.FILE_CHARSET)):
                # Remove any comments from the file
                # NOTE(review): inside a character class "\Z" is not the
                # end-of-string anchor; the pattern presumably intends to
                # strip "--" comments up to end of line/string -- confirm
                # against upstream Django before changing.
                statement = re.sub(ur"--.*([\n\Z]|$)", "", statement)
                if statement.strip():
                    output.append(statement + u";")
            fp.close()

    return output
+
+
def emit_post_sync_signal(created_models, verbosity, interactive, db):
    """Send the post_syncdb signal once for every installed application."""
    # Emit the post_sync signal for every application.
    for app in models.get_apps():
        # App modules are named "<package>.models"; [-2] is the package name.
        app_name = app.__name__.split('.')[-2]
        if verbosity >= 2:
            print "Running post-sync handlers for application", app_name
        models.signals.post_syncdb.send(sender=app, app=app,
            created_models=created_models, verbosity=verbosity,
            interactive=interactive, db=db)
diff --git a/google/appengine/_internal/django/core/management/validation.py b/google/appengine/_internal/django/core/management/validation.py
new file mode 100644
index 0000000..e3f408d
--- /dev/null
+++ b/google/appengine/_internal/django/core/management/validation.py
@@ -0,0 +1,302 @@
+import sys
+
+from google.appengine._internal.django.contrib.contenttypes.generic import GenericForeignKey, GenericRelation
+from google.appengine._internal.django.core.management.color import color_style
+from google.appengine._internal.django.utils.itercompat import is_iterable
+
+try:
+ any
+except NameError:
+ from google.appengine._internal.django.utils.itercompat import any
+
class ModelErrorCollection:
    """Accumulates model-validation errors and echoes each one, colorized,
    to *outfile* as it is added."""
    def __init__(self, outfile=sys.stdout):
        self.errors = []        # list of (context, error) tuples
        self.outfile = outfile
        self.style = color_style()

    def add(self, context, error):
        """Record the error and write "context: error" to outfile."""
        self.errors.append((context, error))
        self.outfile.write(self.style.ERROR("%s: %s\n" % (context, error)))
+
def get_validation_errors(outfile, app=None):
    """
    Validates all models that are part of the specified app. If no app name is provided,
    validates all models of all installed apps. Writes errors, if any, to outfile.
    Returns number of errors.
    """
    from google.appengine._internal.django.conf import settings
    from google.appengine._internal.django.db import models, connection
    from google.appengine._internal.django.db.models.loading import get_app_errors
    from google.appengine._internal.django.db.models.fields.related import RelatedObject

    e = ModelErrorCollection(outfile)

    # App-loading problems are reported first.
    for (app_name, error) in get_app_errors().items():
        e.add(app_name, error)

    for cls in models.get_models(app):
        opts = cls._meta

        # Do field-specific validation.
        for f in opts.local_fields:
            if f.name == 'id' and not f.primary_key and opts.pk.name == 'id':
                e.add(opts, '"%s": You can\'t use "id" as a field name, because each model automatically gets an "id" field if none of the fields have primary_key=True. You need to either remove/rename your "id" field or add primary_key=True to a field.' % f.name)
            if f.name.endswith('_'):
                e.add(opts, '"%s": Field names cannot end with underscores, because this would lead to ambiguous queryset filters.' % f.name)
            if isinstance(f, models.CharField):
                try:
                    max_length = int(f.max_length)
                    if max_length <= 0:
                        e.add(opts, '"%s": CharFields require a "max_length" attribute that is a positive integer.' % f.name)
                except (ValueError, TypeError):
                    e.add(opts, '"%s": CharFields require a "max_length" attribute that is a positive integer.' % f.name)
            if isinstance(f, models.DecimalField):
                decimalp_ok, mdigits_ok = False, False
                decimalp_msg ='"%s": DecimalFields require a "decimal_places" attribute that is a non-negative integer.'
                try:
                    decimal_places = int(f.decimal_places)
                    if decimal_places < 0:
                        e.add(opts, decimalp_msg % f.name)
                    else:
                        decimalp_ok = True
                except (ValueError, TypeError):
                    e.add(opts, decimalp_msg % f.name)
                mdigits_msg = '"%s": DecimalFields require a "max_digits" attribute that is a positive integer.'
                try:
                    max_digits = int(f.max_digits)
                    if max_digits <= 0:
                        e.add(opts, mdigits_msg % f.name)
                    else:
                        mdigits_ok = True
                except (ValueError, TypeError):
                    e.add(opts, mdigits_msg % f.name)
                invalid_values_msg = '"%s": DecimalFields require a "max_digits" attribute value that is greater than the value of the "decimal_places" attribute.'
                # Only compare the two when both parsed as valid integers.
                if decimalp_ok and mdigits_ok:
                    if decimal_places >= max_digits:
                        e.add(opts, invalid_values_msg % f.name)
            if isinstance(f, models.FileField) and not f.upload_to:
                e.add(opts, '"%s": FileFields require an "upload_to" attribute.' % f.name)
            if isinstance(f, models.ImageField):
                # Try to import PIL in either of the two ways it can end up installed.
                try:
                    from PIL import Image
                except ImportError:
                    try:
                        import Image
                    except ImportError:
                        e.add(opts, '"%s": To use ImageFields, you need to install the Python Imaging Library. Get it at http://www.pythonware.com/products/pil/ .' % f.name)
            if isinstance(f, models.BooleanField) and getattr(f, 'null', False):
                e.add(opts, '"%s": BooleanFields do not accept null values. Use a NullBooleanField instead.' % f.name)
            if f.choices:
                if isinstance(f.choices, basestring) or not is_iterable(f.choices):
                    e.add(opts, '"%s": "choices" should be iterable (e.g., a tuple or list).' % f.name)
                else:
                    for c in f.choices:
                        if not isinstance(c, (list, tuple)) or len(c) != 2:
                            e.add(opts, '"%s": "choices" should be a sequence of two-tuples.' % f.name)
            if f.db_index not in (None, True, False):
                e.add(opts, '"%s": "db_index" should be either None, True or False.' % f.name)

            # Perform any backend-specific field validation.
            connection.validation.validate_field(e, opts, f)

            # Check to see if the related field will clash with any existing
            # fields, m2m fields, m2m related objects or related objects
            if f.rel:
                if f.rel.to not in models.get_models():
                    e.add(opts, "'%s' has a relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to))
                # it is a string and we could not find the model it refers to
                # so skip the next section
                if isinstance(f.rel.to, (str, unicode)):
                    continue

                # Make sure the related field specified by a ForeignKey is unique
                if not f.rel.to._meta.get_field(f.rel.field_name).unique:
                    e.add(opts, "Field '%s' under model '%s' must have a unique=True constraint." % (f.rel.field_name, f.rel.to.__name__))

                rel_opts = f.rel.to._meta
                rel_name = RelatedObject(f.rel.to, cls, f).get_accessor_name()
                rel_query_name = f.related_query_name()
                if not f.rel.is_hidden():
                    for r in rel_opts.fields:
                        if r.name == rel_name:
                            e.add(opts, "Accessor for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                        if r.name == rel_query_name:
                            e.add(opts, "Reverse query name for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    for r in rel_opts.local_many_to_many:
                        if r.name == rel_name:
                            e.add(opts, "Accessor for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                        if r.name == rel_query_name:
                            e.add(opts, "Reverse query name for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    for r in rel_opts.get_all_related_many_to_many_objects():
                        if r.get_accessor_name() == rel_name:
                            e.add(opts, "Accessor for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                        if r.get_accessor_name() == rel_query_name:
                            e.add(opts, "Reverse query name for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                    for r in rel_opts.get_all_related_objects():
                        if r.field is not f:
                            if r.get_accessor_name() == rel_name:
                                e.add(opts, "Accessor for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                            if r.get_accessor_name() == rel_query_name:
                                e.add(opts, "Reverse query name for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))

        # Validate many-to-many fields and their intermediate ("through")
        # models.
        seen_intermediary_signatures = []
        for i, f in enumerate(opts.local_many_to_many):
            # Check to see if the related m2m field will clash with any
            # existing fields, m2m fields, m2m related objects or related
            # objects
            if f.rel.to not in models.get_models():
                e.add(opts, "'%s' has an m2m relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to))
                # it is a string and we could not find the model it refers to
                # so skip the next section
                if isinstance(f.rel.to, (str, unicode)):
                    continue

            # Check that the field is not set to unique. ManyToManyFields do not support unique.
            if f.unique:
                e.add(opts, "ManyToManyFields cannot be unique. Remove the unique argument on '%s'." % f.name)

            if f.rel.through is not None and not isinstance(f.rel.through, basestring):
                from_model, to_model = cls, f.rel.to
                if from_model == to_model and f.rel.symmetrical and not f.rel.through._meta.auto_created:
                    e.add(opts, "Many-to-many fields with intermediate tables cannot be symmetrical.")
                seen_from, seen_to, seen_self = False, False, 0
                for inter_field in f.rel.through._meta.fields:
                    rel_to = getattr(inter_field.rel, 'to', None)
                    if from_model == to_model: # relation to self
                        if rel_to == from_model:
                            seen_self += 1
                        if seen_self > 2:
                            e.add(opts, "Intermediary model %s has more than "
                                "two foreign keys to %s, which is ambiguous "
                                "and is not permitted." % (
                                    f.rel.through._meta.object_name,
                                    from_model._meta.object_name
                                )
                            )
                    else:
                        if rel_to == from_model:
                            if seen_from:
                                e.add(opts, "Intermediary model %s has more "
                                    "than one foreign key to %s, which is "
                                    "ambiguous and is not permitted." % (
                                        f.rel.through._meta.object_name,
                                        from_model._meta.object_name
                                    )
                                )
                            else:
                                seen_from = True
                        elif rel_to == to_model:
                            if seen_to:
                                e.add(opts, "Intermediary model %s has more "
                                    "than one foreign key to %s, which is "
                                    "ambiguous and is not permitted." % (
                                        f.rel.through._meta.object_name,
                                        rel_to._meta.object_name
                                    )
                                )
                            else:
                                seen_to = True
                if f.rel.through not in models.get_models(include_auto_created=True):
                    e.add(opts, "'%s' specifies an m2m relation through model "
                        "%s, which has not been installed." % (f.name, f.rel.through)
                    )
                signature = (f.rel.to, cls, f.rel.through)
                if signature in seen_intermediary_signatures:
                    e.add(opts, "The model %s has two manually-defined m2m "
                        "relations through the model %s, which is not "
                        "permitted. Please consider using an extra field on "
                        "your intermediary model instead." % (
                            cls._meta.object_name,
                            f.rel.through._meta.object_name
                        )
                    )
                else:
                    seen_intermediary_signatures.append(signature)
                if not f.rel.through._meta.auto_created:
                    seen_related_fk, seen_this_fk = False, False
                    for field in f.rel.through._meta.fields:
                        if field.rel:
                            if not seen_related_fk and field.rel.to == f.rel.to:
                                seen_related_fk = True
                            elif field.rel.to == cls:
                                seen_this_fk = True
                    if not seen_related_fk or not seen_this_fk:
                        e.add(opts, "'%s' is a manually-defined m2m relation "
                            "through model %s, which does not have foreign keys "
                            "to %s and %s" % (f.name, f.rel.through._meta.object_name,
                                f.rel.to._meta.object_name, cls._meta.object_name)
                        )
            elif isinstance(f.rel.through, basestring):
                e.add(opts, "'%s' specifies an m2m relation through model %s, "
                    "which has not been installed" % (f.name, f.rel.through)
                )
            elif isinstance(f, GenericRelation):
                if not any([isinstance(vfield, GenericForeignKey) for vfield in f.rel.to._meta.virtual_fields]):
                    e.add(opts, "Model '%s' must have a GenericForeignKey in "
                        "order to create a GenericRelation that points to it."
                        % f.rel.to.__name__
                    )

            rel_opts = f.rel.to._meta
            rel_name = RelatedObject(f.rel.to, cls, f).get_accessor_name()
            rel_query_name = f.related_query_name()
            # If rel_name is none, there is no reverse accessor (this only
            # occurs for symmetrical m2m relations to self). If this is the
            # case, there are no clashes to check for this field, as there are
            # no reverse descriptors for this field.
            if rel_name is not None:
                for r in rel_opts.fields:
                    if r.name == rel_name:
                        e.add(opts, "Accessor for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    if r.name == rel_query_name:
                        e.add(opts, "Reverse query name for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                for r in rel_opts.local_many_to_many:
                    if r.name == rel_name:
                        e.add(opts, "Accessor for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    if r.name == rel_query_name:
                        e.add(opts, "Reverse query name for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                for r in rel_opts.get_all_related_many_to_many_objects():
                    if r.field is not f:
                        if r.get_accessor_name() == rel_name:
                            e.add(opts, "Accessor for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                        if r.get_accessor_name() == rel_query_name:
                            e.add(opts, "Reverse query name for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                for r in rel_opts.get_all_related_objects():
                    if r.get_accessor_name() == rel_name:
                        e.add(opts, "Accessor for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                    if r.get_accessor_name() == rel_query_name:
                        e.add(opts, "Reverse query name for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))

        # Check ordering attribute.
        if opts.ordering:
            for field_name in opts.ordering:
                if field_name == '?': continue
                if field_name.startswith('-'):
                    field_name = field_name[1:]
                if opts.order_with_respect_to and field_name == '_order':
                    continue
                # Skip ordering in the format field1__field2 (FIXME: checking
                # this format would be nice, but it's a little fiddly).
                if '__' in field_name:
                    continue
                try:
                    opts.get_field(field_name, many_to_many=False)
                except models.FieldDoesNotExist:
                    e.add(opts, '"ordering" refers to "%s", a field that doesn\'t exist.' % field_name)

        # Check unique_together.
        for ut in opts.unique_together:
            for field_name in ut:
                try:
                    f = opts.get_field(field_name, many_to_many=True)
                except models.FieldDoesNotExist:
                    e.add(opts, '"unique_together" refers to %s, a field that doesn\'t exist. Check your syntax.' % field_name)
                else:
                    if isinstance(f.rel, models.ManyToManyRel):
                        e.add(opts, '"unique_together" refers to %s. ManyToManyFields are not supported in unique_together.' % f.name)
                    if f not in opts.local_fields:
                        e.add(opts, '"unique_together" refers to %s. This is not in the same model as the unique_together statement.' % f.name)

    # Total number of errors collected (each was already written to outfile).
    return len(e.errors)
diff --git a/google/appengine/_internal/django/core/paginator.py b/google/appengine/_internal/django/core/paginator.py
new file mode 100644
index 0000000..495cdf2
--- /dev/null
+++ b/google/appengine/_internal/django/core/paginator.py
@@ -0,0 +1,120 @@
+from math import ceil
+
class InvalidPage(Exception):
    """Base exception for all pagination errors raised by Paginator."""
    pass
+
class PageNotAnInteger(InvalidPage):
    """Raised by Paginator.validate_number() for non-integer page numbers."""
    pass
+
class EmptyPage(InvalidPage):
    """Raised when the requested page is < 1 or beyond the last page."""
    pass
+
class Paginator(object):
    """
    Split ``object_list`` into pages of ``per_page`` items.

    ``orphans``: if the final page would contain this many items or fewer,
    they are folded into the previous page instead (see page()).
    ``allow_empty_first_page``: whether page 1 is valid for an empty list.
    """

    def __init__(self, object_list, per_page, orphans=0, allow_empty_first_page=True):
        self.object_list = object_list
        self.per_page = per_page
        self.orphans = orphans
        self.allow_empty_first_page = allow_empty_first_page
        # Computed lazily and cached by the count/num_pages properties.
        self._num_pages = self._count = None

    def validate_number(self, number):
        "Validates the given 1-based page number."
        try:
            number = int(number)
        except (TypeError, ValueError):
            # int(None) and int([]) raise TypeError, not ValueError; catch
            # both so callers always get an InvalidPage subclass instead of
            # a bare TypeError leaking out.
            raise PageNotAnInteger('That page number is not an integer')
        if number < 1:
            raise EmptyPage('That page number is less than 1')
        if number > self.num_pages:
            if number == 1 and self.allow_empty_first_page:
                pass
            else:
                raise EmptyPage('That page contains no results')
        return number

    def page(self, number):
        "Returns a Page object for the given 1-based page number."
        number = self.validate_number(number)
        bottom = (number - 1) * self.per_page
        top = bottom + self.per_page
        # Fold trailing orphans into the last page.
        if top + self.orphans >= self.count:
            top = self.count
        return Page(self.object_list[bottom:top], number, self)

    def _get_count(self):
        "Returns the total number of objects, across all pages."
        if self._count is None:
            try:
                self._count = self.object_list.count()
            except (AttributeError, TypeError):
                # AttributeError if object_list has no count() method.
                # TypeError if object_list.count() requires arguments
                # (i.e. is of type list).
                self._count = len(self.object_list)
        return self._count
    count = property(_get_count)

    def _get_num_pages(self):
        "Returns the total number of pages."
        if self._num_pages is None:
            if self.count == 0 and not self.allow_empty_first_page:
                self._num_pages = 0
            else:
                # Orphans shrink the effective hit count; always at least
                # one page when an empty first page is allowed.
                hits = max(1, self.count - self.orphans)
                self._num_pages = int(ceil(hits / float(self.per_page)))
        return self._num_pages
    num_pages = property(_get_num_pages)

    def _get_page_range(self):
        """
        Returns a 1-based range of pages for iterating through within
        a template for loop.
        """
        return range(1, self.num_pages + 1)
    page_range = property(_get_page_range)
+
+QuerySetPaginator = Paginator # For backwards-compatibility.
+
class Page(object):
    """A single page of results as produced by Paginator.page()."""

    def __init__(self, object_list, number, paginator):
        self.object_list = object_list
        self.number = number
        self.paginator = paginator

    def __repr__(self):
        return '<Page %s of %s>' % (self.number, self.paginator.num_pages)

    def has_next(self):
        """True if a page follows this one."""
        return self.number < self.paginator.num_pages

    def has_previous(self):
        """True if a page precedes this one."""
        return self.number > 1

    def has_other_pages(self):
        """True unless this is the only page."""
        return self.has_previous() or self.has_next()

    def next_page_number(self):
        """1-based number of the following page (not validated)."""
        return self.number + 1

    def previous_page_number(self):
        """1-based number of the preceding page (not validated)."""
        return self.number - 1

    def start_index(self):
        """
        Returns the 1-based index of the first object on this page,
        relative to total objects in the paginator.
        """
        if not self.paginator.count:
            # No items at all: report zero rather than one.
            return 0
        return self.paginator.per_page * (self.number - 1) + 1

    def end_index(self):
        """
        Returns the 1-based index of the last object on this page,
        relative to total objects found (hits).
        """
        last_page = self.paginator.num_pages
        if self.number == last_page:
            # The final page may absorb orphans, so use the exact total.
            return self.paginator.count
        return self.paginator.per_page * self.number
diff --git a/google/appengine/_internal/django/core/serializers/__init__.py b/google/appengine/_internal/django/core/serializers/__init__.py
new file mode 100644
index 0000000..44ecc40
--- /dev/null
+++ b/google/appengine/_internal/django/core/serializers/__init__.py
@@ -0,0 +1,117 @@
+"""
+Interfaces for serializing Django objects.
+
+Usage::
+
+ from google.appengine._internal.django.core import serializers
+ json = serializers.serialize("json", some_query_set)
+ objects = list(serializers.deserialize("json", json))
+
+To add your own serializers, use the SERIALIZATION_MODULES setting::
+
+ SERIALIZATION_MODULES = {
+ "csv" : "path.to.csv.serializer",
+ "txt" : "path.to.txt.serializer",
+ }
+
+"""
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.utils import importlib
+
# Built-in serializers, mapping format name -> dotted module path that
# register_serializer() will pass to importlib.import_module().
# NOTE(review): these paths use "django.core.serializers.*" while the rest of
# this file imports via "google.appengine._internal.django." -- confirm the
# bare paths resolve in this vendored tree.
BUILTIN_SERIALIZERS = {
    "xml" : "django.core.serializers.xml_serializer",
    "python" : "django.core.serializers.python",
    "json" : "django.core.serializers.json",
}

# Check for PyYaml and register the serializer if it's available.
try:
    import yaml
    BUILTIN_SERIALIZERS["yaml"] = "django.core.serializers.pyyaml"
except ImportError:
    pass
+
+_serializers = {}
+
def register_serializer(format, serializer_module, serializers=None):
    """Register a new serializer.

    ``serializer_module`` should be the fully qualified module name
    for the serializer.

    If ``serializers`` is provided, the registration will be added
    to the provided dictionary.

    If ``serializers`` is not provided, the registration will be made
    directly into the global register of serializers. Adding serializers
    directly is not a thread-safe operation.
    """
    if serializers is None and not _serializers:
        _load_serializers()
    mod = importlib.import_module(serializer_module)
    # Register into the caller-supplied dict when given, otherwise into
    # the module-level registry.
    target = _serializers if serializers is None else serializers
    target[format] = mod
+
def unregister_serializer(format):
    """Remove *format* from the global registry (not thread-safe)."""
    if not _serializers:
        _load_serializers()
    del _serializers[format]
+
def get_serializer(format):
    """Return the Serializer class registered under *format*."""
    if not _serializers:
        _load_serializers()
    return _serializers[format].Serializer
+
def get_serializer_formats():
    """Return the names of every registered serialization format."""
    if not _serializers:
        _load_serializers()
    return _serializers.keys()
+
def get_public_serializer_formats():
    """Return the format names whose serializer is not internal-only."""
    if not _serializers:
        _load_serializers()
    public = []
    for fmt, module in _serializers.iteritems():
        if not module.Serializer.internal_use_only:
            public.append(fmt)
    return public
+
def get_deserializer(format):
    """Return the Deserializer callable registered under *format*."""
    if not _serializers:
        _load_serializers()
    return _serializers[format].Deserializer
+
def serialize(format, queryset, **options):
    """
    Serialize a queryset (or any iterator that returns database objects)
    using the serializer registered for *format*.
    """
    serializer = get_serializer(format)()
    serializer.serialize(queryset, **options)
    return serializer.getvalue()
+
def deserialize(format, stream_or_string, **options):
    """
    Deserialize a stream or a string. Returns an iterator that yields
    ``(obj, m2m_relation_dict)`` pairs, where ``obj`` is an instantiated --
    but *unsaved* -- object and ``m2m_relation_dict`` maps
    ``{m2m_field_name : list_of_related_objects}``.
    """
    deserializer_cls = get_deserializer(format)
    return deserializer_cls(stream_or_string, **options)
+
def _load_serializers():
    """
    Register built-in and settings-defined serializers. This is done lazily
    so user code has a chance to (e.g.) set up custom settings without
    needing to be careful of import order.
    """
    global _serializers
    registry = {}
    # Built-ins first, then any SERIALIZATION_MODULES overrides/additions.
    for fmt in BUILTIN_SERIALIZERS:
        register_serializer(fmt, BUILTIN_SERIALIZERS[fmt], registry)
    if hasattr(settings, "SERIALIZATION_MODULES"):
        for fmt in settings.SERIALIZATION_MODULES:
            register_serializer(fmt, settings.SERIALIZATION_MODULES[fmt], registry)
    _serializers = registry
diff --git a/google/appengine/_internal/django/core/serializers/base.py b/google/appengine/_internal/django/core/serializers/base.py
new file mode 100644
index 0000000..137f83b
--- /dev/null
+++ b/google/appengine/_internal/django/core/serializers/base.py
@@ -0,0 +1,172 @@
+"""
+Module for abstract serializer/unserializer base classes.
+"""
+
+from StringIO import StringIO
+
+from google.appengine._internal.django.db import models
+from google.appengine._internal.django.utils.encoding import smart_str, smart_unicode
+from google.appengine._internal.django.utils import datetime_safe
+
class SerializationError(Exception):
    """Something bad happened during serialization."""
    # Raised by serializers, e.g. when a non-model object is encountered.
    pass
+
class DeserializationError(Exception):
    """Something bad happened during deserialization."""
    # Raised for malformed input, e.g. missing attributes or an invalid
    # model identifier.
    pass
+
class Serializer(object):
    """
    Abstract serializer base class.

    serialize() drives the start_*/end_*/handle_* hooks over a queryset;
    subclasses implement the hooks and usually getvalue().
    """

    # Indicates if the implemented serializer is only available for
    # internal Django use.
    internal_use_only = False

    def serialize(self, queryset, **options):
        """
        Serialize a queryset.

        Options popped from **options:
          stream           -- file-like output target (default: StringIO)
          fields           -- iterable of field names to restrict output to
          use_natural_keys -- serialize relations via natural keys
        Any remaining options stay in self.options for subclasses.
        """
        self.options = options

        self.stream = options.pop("stream", StringIO())
        self.selected_fields = options.pop("fields", None)
        self.use_natural_keys = options.pop("use_natural_keys", False)

        self.start_serialization()
        for obj in queryset:
            self.start_object(obj)
            for field in obj._meta.local_fields:
                if field.serialize:
                    if field.rel is None:
                        if self.selected_fields is None or field.attname in self.selected_fields:
                            self.handle_field(obj, field)
                    else:
                        # FK attnames end in "_id"; strip the suffix so the
                        # "fields" option can name the field itself.
                        if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
                            self.handle_fk_field(obj, field)
            for field in obj._meta.many_to_many:
                if field.serialize:
                    if self.selected_fields is None or field.attname in self.selected_fields:
                        self.handle_m2m_field(obj, field)
            self.end_object(obj)
        self.end_serialization()
        return self.getvalue()

    def get_string_value(self, obj, field):
        """
        Convert a field's value to a string.
        """
        return smart_unicode(field.value_to_string(obj))

    def start_serialization(self):
        """
        Called when serializing of the queryset starts.
        """
        raise NotImplementedError

    def end_serialization(self):
        """
        Called when serializing of the queryset ends.
        """
        pass

    def start_object(self, obj):
        """
        Called when serializing of an object starts.
        """
        raise NotImplementedError

    def end_object(self, obj):
        """
        Called when serializing of an object ends.
        """
        pass

    def handle_field(self, obj, field):
        """
        Called to handle each individual (non-relational) field on an object.
        """
        raise NotImplementedError

    def handle_fk_field(self, obj, field):
        """
        Called to handle a ForeignKey field.
        """
        raise NotImplementedError

    def handle_m2m_field(self, obj, field):
        """
        Called to handle a ManyToManyField.
        """
        raise NotImplementedError

    def getvalue(self):
        """
        Return the fully serialized queryset (or None if the output stream is
        not seekable).
        """
        # Streams without getvalue() (e.g. real files) yield None here.
        if callable(getattr(self.stream, 'getvalue', None)):
            return self.stream.getvalue()
+
class Deserializer(object):
    """
    Abstract base deserializer class.

    Instances are iterators; subclasses implement next() to yield items
    (typically DeserializedObject instances) parsed from self.stream.
    """

    def __init__(self, stream_or_string, **options):
        """
        Init this serializer given a stream or a string
        """
        self.options = options
        # Strings are wrapped in a StringIO so subclasses only ever deal
        # with a file-like stream.
        if isinstance(stream_or_string, basestring):
            self.stream = StringIO(stream_or_string)
        else:
            self.stream = stream_or_string
        # hack to make sure that the models have all been loaded before
        # deserialization starts (otherwise subclass calls to get_model()
        # and friends might fail...)
        models.get_apps()

    def __iter__(self):
        return self

    def next(self):
        """Iteration interface -- return the next item in the stream"""
        raise NotImplementedError
+
class DeserializedObject(object):
    """
    A deserialized model.

    Basically a container for holding the pre-saved deserialized data along
    with the many-to-many data saved with the object.

    Call ``save()`` to save the object (with the many-to-many data) to the
    database; call ``save(save_m2m=False)`` to save just the object fields
    (and not touch the many-to-many stuff.)
    """

    def __init__(self, obj, m2m_data=None):
        self.object = obj
        # Maps m2m accessor name -> list of related objects (or None).
        self.m2m_data = m2m_data

    def __repr__(self):
        return "<DeserializedObject: %s.%s(pk=%s)>" % (
            self.object._meta.app_label, self.object._meta.object_name, self.object.pk)

    def save(self, save_m2m=True, using=None):
        """Save the object (and, optionally, its m2m data) to *using*."""
        # Call save on the Model baseclass directly. This bypasses any
        # model-defined save. The save is also forced to be raw.
        # This ensures that the data that is deserialized is literally
        # what came from the file, not post-processed by pre_save/save
        # methods.
        models.Model.save_base(self.object, using=using, raw=True)
        if self.m2m_data and save_m2m:
            for accessor_name, object_list in self.m2m_data.items():
                setattr(self.object, accessor_name, object_list)

        # prevent a second (possibly accidental) call to save() from saving
        # the m2m data twice.
        self.m2m_data = None
diff --git a/google/appengine/_internal/django/core/serializers/json.py b/google/appengine/_internal/django/core/serializers/json.py
new file mode 100644
index 0000000..a576de1
--- /dev/null
+++ b/google/appengine/_internal/django/core/serializers/json.py
@@ -0,0 +1,62 @@
+"""
+Serialize data to/from JSON
+"""
+
+import datetime
+import decimal
+from StringIO import StringIO
+
+from google.appengine._internal.django.core.serializers.python import Serializer as PythonSerializer
+from google.appengine._internal.django.core.serializers.python import Deserializer as PythonDeserializer
+from google.appengine._internal.django.utils import datetime_safe
+from google.appengine._internal.django.utils import simplejson
+
class Serializer(PythonSerializer):
    """
    Convert a queryset to JSON.
    """
    internal_use_only = False

    def end_serialization(self):
        # self.objects was accumulated by PythonSerializer; any leftover
        # options are forwarded straight to simplejson.dump.
        simplejson.dump(self.objects, self.stream, cls=DjangoJSONEncoder, **self.options)

    def getvalue(self):
        # Returns None when the stream has no getvalue() (e.g. a real file).
        if callable(getattr(self.stream, 'getvalue', None)):
            return self.stream.getvalue()
+
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of JSON data.
    """
    # Normalize the input to a file-like stream before parsing.
    if isinstance(stream_or_string, basestring):
        stream = StringIO(stream_or_string)
    else:
        stream = stream_or_string
    parsed = simplejson.load(stream)
    for deserialized in PythonDeserializer(parsed, **options):
        yield deserialized
+
class DjangoJSONEncoder(simplejson.JSONEncoder):
    """
    JSONEncoder subclass that knows how to encode date/time and decimal types.
    """

    DATE_FORMAT = "%Y-%m-%d"
    TIME_FORMAT = "%H:%M:%S"

    def default(self, o):
        # datetime must be checked before date: datetime subclasses date.
        if isinstance(o, datetime.datetime):
            # datetime_safe wraps the value before strftime -- presumably to
            # handle dates strftime would reject; confirm in datetime_safe.
            d = datetime_safe.new_datetime(o)
            return d.strftime("%s %s" % (self.DATE_FORMAT, self.TIME_FORMAT))
        elif isinstance(o, datetime.date):
            d = datetime_safe.new_date(o)
            return d.strftime(self.DATE_FORMAT)
        elif isinstance(o, datetime.time):
            return o.strftime(self.TIME_FORMAT)
        elif isinstance(o, decimal.Decimal):
            # Stringify to avoid float precision loss.
            return str(o)
        else:
            # Defer to the base class, which raises TypeError.
            return super(DjangoJSONEncoder, self).default(o)
+
+# Older, deprecated class name (for backwards compatibility purposes).
+DateTimeAwareJSONEncoder = DjangoJSONEncoder
+
diff --git a/google/appengine/_internal/django/core/serializers/python.py b/google/appengine/_internal/django/core/serializers/python.py
new file mode 100644
index 0000000..bc7c2fa
--- /dev/null
+++ b/google/appengine/_internal/django/core/serializers/python.py
@@ -0,0 +1,142 @@
+"""
+A Python "serializer". Doesn't do much serializing per se -- just converts to
+and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
+other serializers.
+"""
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.serializers import base
+from google.appengine._internal.django.db import models, DEFAULT_DB_ALIAS
+from google.appengine._internal.django.utils.encoding import smart_unicode, is_protected_type
+
class Serializer(base.Serializer):
    """
    Serializes a QuerySet to basic Python objects.
    """

    internal_use_only = True

    def start_serialization(self):
        """Reset the accumulator state."""
        self._current = None
        self.objects = []

    def end_serialization(self):
        pass

    def start_object(self, obj):
        # Fields for the object currently being serialized.
        self._current = {}

    def end_object(self, obj):
        """Flush the current object as a {model, pk, fields} dict."""
        self.objects.append({
            "model" : smart_unicode(obj._meta),
            "pk" : smart_unicode(obj._get_pk_val(), strings_only=True),
            "fields" : self._current
        })
        self._current = None

    def handle_field(self, obj, field):
        value = field._get_val_from_obj(obj)
        # Protected types (i.e., primitives like None, numbers, dates,
        # and Decimals) are passed through as is. All other values are
        # converted to string first.
        if is_protected_type(value):
            self._current[field.name] = value
        else:
            self._current[field.name] = field.value_to_string(obj)

    def handle_fk_field(self, obj, field):
        """Serialize a FK as its natural key, PK, or target field value."""
        related = getattr(obj, field.name)
        if related is not None:
            if self.use_natural_keys and hasattr(related, 'natural_key'):
                related = related.natural_key()
            else:
                if field.rel.field_name == related._meta.pk.name:
                    # Related to remote object via primary key
                    related = related._get_pk_val()
                else:
                    # Related to remote object via other field
                    related = smart_unicode(getattr(related, field.rel.field_name), strings_only=True)
        self._current[field.name] = related

    def handle_m2m_field(self, obj, field):
        # Only serialize m2m relations without an explicit "through" model.
        if field.rel.through._meta.auto_created:
            if self.use_natural_keys and hasattr(field.rel.to, 'natural_key'):
                m2m_value = lambda value: value.natural_key()
            else:
                m2m_value = lambda value: smart_unicode(value._get_pk_val(), strings_only=True)
            self._current[field.name] = [m2m_value(related)
                               for related in getattr(obj, field.name).iterator()]

    def getvalue(self):
        """Return the accumulated list of per-object dicts."""
        return self.objects
+
def Deserializer(object_list, **options):
    """
    Deserialize simple Python objects back into Django ORM instances.

    It's expected that you pass the Python objects themselves (instead of a
    stream or a string) to the constructor

    Yields base.DeserializedObject instances; ``using`` in **options selects
    the database alias for natural-key lookups.
    """
    db = options.pop('using', DEFAULT_DB_ALIAS)
    # Ensure the model cache is populated before any get_model() lookups.
    models.get_apps()
    for d in object_list:
        # Look up the model and starting build a dict of data for it.
        Model = _get_model(d["model"])
        data = {Model._meta.pk.attname : Model._meta.pk.to_python(d["pk"])}
        m2m_data = {}

        # Handle each field
        for (field_name, field_value) in d["fields"].iteritems():
            # Decode byte strings so all values are unicode downstream.
            if isinstance(field_value, str):
                field_value = smart_unicode(field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True)

            field = Model._meta.get_field(field_name)

            # Handle M2M relations
            if field.rel and isinstance(field.rel, models.ManyToManyRel):
                if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
                    # A list-valued entry is a natural key; a scalar is a PK.
                    def m2m_convert(value):
                        if hasattr(value, '__iter__'):
                            return field.rel.to._default_manager.db_manager(db).get_by_natural_key(*value).pk
                        else:
                            return smart_unicode(field.rel.to._meta.pk.to_python(value))
                else:
                    m2m_convert = lambda v: smart_unicode(field.rel.to._meta.pk.to_python(v))
                m2m_data[field.name] = [m2m_convert(pk) for pk in field_value]

            # Handle FK fields
            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
                if field_value is not None:
                    if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
                        if hasattr(field_value, '__iter__'):
                            obj = field.rel.to._default_manager.db_manager(db).get_by_natural_key(*field_value)
                            value = getattr(obj, field.rel.field_name)
                            # If this is a natural foreign key to an object that
                            # has a FK/O2O as the foreign key, use the FK value
                            if field.rel.to._meta.pk.rel:
                                value = value.pk
                        else:
                            value = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                        data[field.attname] = value
                    else:
                        data[field.attname] = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                else:
                    data[field.attname] = None

            # Handle all other fields
            else:
                data[field.name] = field.to_python(field_value)

        yield base.DeserializedObject(Model(**data), m2m_data)
+
def _get_model(model_identifier):
    """
    Helper to look up a model from an "app_label.module_name" string.
    """
    try:
        model = models.get_model(*model_identifier.split("."))
    except TypeError:
        # Wrong number of dotted components.
        model = None
    if model is None:
        raise base.DeserializationError(u"Invalid model identifier: '%s'" % model_identifier)
    return model
diff --git a/google/appengine/_internal/django/core/serializers/pyyaml.py b/google/appengine/_internal/django/core/serializers/pyyaml.py
new file mode 100644
index 0000000..12d8a9b
--- /dev/null
+++ b/google/appengine/_internal/django/core/serializers/pyyaml.py
@@ -0,0 +1,56 @@
+"""
+YAML serializer.
+
+Requires PyYaml (http://pyyaml.org/), but that's checked for in __init__.
+"""
+
+from StringIO import StringIO
+import decimal
+import yaml
+
+from google.appengine._internal.django.db import models
+from google.appengine._internal.django.core.serializers.python import Serializer as PythonSerializer
+from google.appengine._internal.django.core.serializers.python import Deserializer as PythonDeserializer
+
class DjangoSafeDumper(yaml.SafeDumper):
    # SafeDumper has no representer for Decimal; emit it as a plain string
    # scalar so the output stays language-neutral.
    def represent_decimal(self, data):
        return self.represent_scalar('tag:yaml.org,2002:str', str(data))

DjangoSafeDumper.add_representer(decimal.Decimal, DjangoSafeDumper.represent_decimal)
+
class Serializer(PythonSerializer):
    """
    Convert a queryset to YAML.
    """

    internal_use_only = False

    def handle_field(self, obj, field):
        """Serialize one field, stringifying TimeField values (see below)."""
        # A nasty special case: base YAML doesn't support serialization of time
        # types (as opposed to dates or datetimes, which it does support). Since
        # we want to use the "safe" serializer for better interoperability, we
        # need to do something with those pesky times. Converting 'em to strings
        # isn't perfect, but it's better than a "!!python/time" type which would
        # halt deserialization under any other language.
        if isinstance(field, models.TimeField) and getattr(obj, field.name) is not None:
            self._current[field.name] = str(getattr(obj, field.name))
        else:
            super(Serializer, self).handle_field(obj, field)

    def end_serialization(self):
        # Dump the dict list built by PythonSerializer as safe YAML.
        yaml.dump(self.objects, self.stream, Dumper=DjangoSafeDumper, **self.options)

    def getvalue(self):
        return self.stream.getvalue()
+
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of YAML data.
    """
    # Normalize the input to a file-like stream before parsing.
    if isinstance(stream_or_string, basestring):
        stream = StringIO(stream_or_string)
    else:
        stream = stream_or_string
    parsed = yaml.load(stream)
    for deserialized in PythonDeserializer(parsed, **options):
        yield deserialized
+
diff --git a/google/appengine/_internal/django/core/serializers/xml_serializer.py b/google/appengine/_internal/django/core/serializers/xml_serializer.py
new file mode 100644
index 0000000..c198fe4
--- /dev/null
+++ b/google/appengine/_internal/django/core/serializers/xml_serializer.py
@@ -0,0 +1,295 @@
+"""
+XML serializer.
+"""
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.serializers import base
+from google.appengine._internal.django.db import models, DEFAULT_DB_ALIAS
+from google.appengine._internal.django.utils.xmlutils import SimplerXMLGenerator
+from google.appengine._internal.django.utils.encoding import smart_unicode
+from xml.dom import pulldom
+
class Serializer(base.Serializer):
    """
    Serializes a QuerySet to XML.
    """

    def indent(self, level):
        """Emit indentation whitespace when the 'indent' option is set."""
        if self.options.get('indent', None) is not None:
            self.xml.ignorableWhitespace('\n' + ' ' * self.options.get('indent', None) * level)

    def start_serialization(self):
        """
        Start serialization -- open the XML document and the root element.
        """
        self.xml = SimplerXMLGenerator(self.stream, self.options.get("encoding", settings.DEFAULT_CHARSET))
        self.xml.startDocument()
        self.xml.startElement("django-objects", {"version" : "1.0"})

    def end_serialization(self):
        """
        End serialization -- end the document.
        """
        self.indent(0)
        self.xml.endElement("django-objects")
        self.xml.endDocument()

    def start_object(self, obj):
        """
        Called as each object is handled.
        """
        if not hasattr(obj, "_meta"):
            raise base.SerializationError("Non-model object (%s) encountered during serialization" % type(obj))

        self.indent(1)
        obj_pk = obj._get_pk_val()
        # Objects without a PK get no "pk" attribute at all.
        if obj_pk is None:
            attrs = {"model": smart_unicode(obj._meta),}
        else:
            attrs = {
                "pk": smart_unicode(obj._get_pk_val()),
                "model": smart_unicode(obj._meta),
            }

        self.xml.startElement("object", attrs)

    def end_object(self, obj):
        """
        Called after handling all fields for an object.
        """
        self.indent(1)
        self.xml.endElement("object")

    def handle_field(self, obj, field):
        """
        Called to handle each field on an object (except for ForeignKeys and
        ManyToManyFields)
        """
        self.indent(2)
        self.xml.startElement("field", {
            "name" : field.name,
            "type" : field.get_internal_type()
        })

        # Get a "string version" of the object's data.
        if getattr(obj, field.name) is not None:
            self.xml.characters(field.value_to_string(obj))
        else:
            # NULL values become an empty <None/> child element.
            self.xml.addQuickElement("None")

        self.xml.endElement("field")

    def handle_fk_field(self, obj, field):
        """
        Called to handle a ForeignKey (we need to treat them slightly
        differently from regular fields).
        """
        self._start_relational_field(field)
        related = getattr(obj, field.name)
        if related is not None:
            if self.use_natural_keys and hasattr(related, 'natural_key'):
                # If related object has a natural key, use it
                related = related.natural_key()
                # Iterable natural keys are rolled out as subelements
                for key_value in related:
                    self.xml.startElement("natural", {})
                    self.xml.characters(smart_unicode(key_value))
                    self.xml.endElement("natural")
            else:
                if field.rel.field_name == related._meta.pk.name:
                    # Related to remote object via primary key
                    related = related._get_pk_val()
                else:
                    # Related to remote object via other field
                    related = getattr(related, field.rel.field_name)
                self.xml.characters(smart_unicode(related))
        else:
            self.xml.addQuickElement("None")
        self.xml.endElement("field")

    def handle_m2m_field(self, obj, field):
        """
        Called to handle a ManyToManyField. Related objects are only
        serialized as references to the object's PK (i.e. the related *data*
        is not dumped, just the relation).
        """
        # Only auto-created (no explicit "through" model) m2m relations are
        # serialized.
        if field.rel.through._meta.auto_created:
            self._start_relational_field(field)
            if self.use_natural_keys and hasattr(field.rel.to, 'natural_key'):
                # If the objects in the m2m have a natural key, use it
                def handle_m2m(value):
                    natural = value.natural_key()
                    # Iterable natural keys are rolled out as subelements
                    self.xml.startElement("object", {})
                    for key_value in natural:
                        self.xml.startElement("natural", {})
                        self.xml.characters(smart_unicode(key_value))
                        self.xml.endElement("natural")
                    self.xml.endElement("object")
            else:
                def handle_m2m(value):
                    self.xml.addQuickElement("object", attrs={
                        'pk' : smart_unicode(value._get_pk_val())
                    })
            for relobj in getattr(obj, field.name).iterator():
                handle_m2m(relobj)

            self.xml.endElement("field")

    def _start_relational_field(self, field):
        """
        Helper to output the <field> element for relational fields
        """
        self.indent(2)
        self.xml.startElement("field", {
            "name" : field.name,
            "rel" : field.rel.__class__.__name__,
            "to" : smart_unicode(field.rel.to._meta),
        })
+
class Deserializer(base.Deserializer):
    """
    Deserialize XML.
    """

    def __init__(self, stream_or_string, **options):
        super(Deserializer, self).__init__(stream_or_string, **options)
        # pulldom yields (event, node) pairs; nodes are expanded lazily.
        self.event_stream = pulldom.parse(self.stream)
        # Database alias for natural-key lookups.
        self.db = options.pop('using', DEFAULT_DB_ALIAS)

    def next(self):
        """Advance to the next <object> element and deserialize it."""
        for event, node in self.event_stream:
            if event == "START_ELEMENT" and node.nodeName == "object":
                self.event_stream.expandNode(node)
                return self._handle_object(node)
        raise StopIteration

    def _handle_object(self, node):
        """
        Convert an <object> node to a DeserializedObject.
        """
        # Look up the model using the model loading mechanism. If this fails,
        # bail.
        Model = self._get_model_from_node(node, "model")

        # Start building a data dictionary from the object.
        # If the node is missing the pk set it to None
        if node.hasAttribute("pk"):
            pk = node.getAttribute("pk")
        else:
            pk = None

        data = {Model._meta.pk.attname : Model._meta.pk.to_python(pk)}

        # Also start building a dict of m2m data (this is saved as
        # {m2m_accessor_attribute : [list_of_related_objects]})
        m2m_data = {}

        # Deseralize each field.
        for field_node in node.getElementsByTagName("field"):
            # If the field is missing the name attribute, bail (are you
            # sensing a pattern here?)
            field_name = field_node.getAttribute("name")
            if not field_name:
                raise base.DeserializationError("<field> node is missing the 'name' attribute")

            # Get the field from the Model. This will raise a
            # FieldDoesNotExist if, well, the field doesn't exist, which will
            # be propagated correctly.
            field = Model._meta.get_field(field_name)

            # As is usually the case, relation fields get the special treatment.
            if field.rel and isinstance(field.rel, models.ManyToManyRel):
                m2m_data[field.name] = self._handle_m2m_field_node(field_node, field)
            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
                data[field.attname] = self._handle_fk_field_node(field_node, field)
            else:
                # A <None/> child marks a NULL value (see the serializer).
                if field_node.getElementsByTagName('None'):
                    value = None
                else:
                    value = field.to_python(getInnerText(field_node).strip())
                data[field.name] = value

        # Return a DeserializedObject so that the m2m data has a place to live.
        return base.DeserializedObject(Model(**data), m2m_data)

    def _handle_fk_field_node(self, node, field):
        """
        Handle a <field> node for a ForeignKey
        """
        # Check if there is a child node named 'None', returning None if so.
        if node.getElementsByTagName('None'):
            return None
        else:
            if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
                keys = node.getElementsByTagName('natural')
                if keys:
                    # If there are 'natural' subelements, it must be a natural key
                    field_value = [getInnerText(k).strip() for k in keys]
                    obj = field.rel.to._default_manager.db_manager(self.db).get_by_natural_key(*field_value)
                    obj_pk = getattr(obj, field.rel.field_name)
                    # If this is a natural foreign key to an object that
                    # has a FK/O2O as the foreign key, use the FK value
                    if field.rel.to._meta.pk.rel:
                        obj_pk = obj_pk.pk
                else:
                    # Otherwise, treat like a normal PK
                    field_value = getInnerText(node).strip()
                    obj_pk = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                return obj_pk
            else:
                field_value = getInnerText(node).strip()
                return field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)

    def _handle_m2m_field_node(self, node, field):
        """
        Handle a <field> node for a ManyToManyField.
        """
        if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
            def m2m_convert(n):
                keys = n.getElementsByTagName('natural')
                if keys:
                    # If there are 'natural' subelements, it must be a natural key
                    field_value = [getInnerText(k).strip() for k in keys]
                    obj_pk = field.rel.to._default_manager.db_manager(self.db).get_by_natural_key(*field_value).pk
                else:
                    # Otherwise, treat like a normal PK value.
                    obj_pk = field.rel.to._meta.pk.to_python(n.getAttribute('pk'))
                return obj_pk
        else:
            m2m_convert = lambda n: field.rel.to._meta.pk.to_python(n.getAttribute('pk'))
        return [m2m_convert(c) for c in node.getElementsByTagName("object")]

    def _get_model_from_node(self, node, attr):
        """
        Helper to look up a model from a <object model=...> or a <field
        rel=... to=...> node.
        """
        model_identifier = node.getAttribute(attr)
        if not model_identifier:
            raise base.DeserializationError(
                "<%s> node is missing the required '%s' attribute" % (node.nodeName, attr))
        try:
            Model = models.get_model(*model_identifier.split("."))
        except TypeError:
            Model = None
        if Model is None:
            raise base.DeserializationError(
                "<%s> node has invalid model identifier: '%s'" % (node.nodeName, model_identifier))
        return Model
+
+
def getInnerText(node):
    """
    Get all the inner text of a DOM node (recursively).
    """
    # inspired by http://mail.python.org/pipermail/xml-sig/2005-March/011022.html
    pieces = []
    for child in node.childNodes:
        if child.nodeType in (child.TEXT_NODE, child.CDATA_SECTION_NODE):
            pieces.append(child.data)
        elif child.nodeType == child.ELEMENT_NODE:
            pieces.extend(getInnerText(child))
        # Other node types (comments, PIs, ...) contribute no text.
    return u"".join(pieces)
diff --git a/google/appengine/_internal/django/core/servers/__init__.py b/google/appengine/_internal/django/core/servers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/google/appengine/_internal/django/core/servers/__init__.py
diff --git a/google/appengine/_internal/django/core/servers/basehttp.py b/google/appengine/_internal/django/core/servers/basehttp.py
new file mode 100644
index 0000000..953d2df
--- /dev/null
+++ b/google/appengine/_internal/django/core/servers/basehttp.py
@@ -0,0 +1,719 @@
+"""
+BaseHTTPServer that implements the Python WSGI protocol (PEP 333, rev 1.21).
+
+Adapted from wsgiref.simple_server: http://svn.eby-sarna.com/wsgiref/
+
+This is a simple server for use in testing or debugging Django apps. It hasn't
+been reviewed for security issues. Don't use it for production use.
+"""
+
+from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+import mimetypes
+import os
+import re
+import stat
+import sys
+import urllib
+
+from google.appengine._internal.django.core.management.color import color_style
+from google.appengine._internal.django.utils.http import http_date
+from google.appengine._internal.django.utils._os import safe_join
+
+__version__ = "0.1"
+__all__ = ['WSGIServer','WSGIRequestHandler']
+
+server_version = "WSGIServer/" + __version__
+sys_version = "Python/" + sys.version.split()[0]
+software_version = server_version + ' ' + sys_version
+
+class WSGIServerException(Exception):
+    """Raised by WSGIServer.server_bind when binding the listening socket fails."""
+    pass
+
+class FileWrapper(object):
+    """Wrapper to convert file-like objects to iterables.
+
+    Yields the file's contents in blocks of 'blksize' bytes. Supports both
+    the modern iterator protocol and the legacy sequence-iteration protocol.
+    """
+
+    def __init__(self, filelike, blksize=8192):
+        self.filelike = filelike
+        self.blksize = blksize
+        if hasattr(filelike,'close'):
+            # Expose the wrapped file's close() so the server can release it.
+            self.close = filelike.close
+
+    def __getitem__(self,key):
+        # Legacy sequence-iteration protocol: 'key' is ignored; each call
+        # returns the next block, and IndexError signals exhaustion.
+        data = self.filelike.read(self.blksize)
+        if data:
+            return data
+        raise IndexError
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        # Python 2 iterator protocol ('next', not '__next__').
+        data = self.filelike.read(self.blksize)
+        if data:
+            return data
+        raise StopIteration
+
+# Regular expression that matches `special' characters in parameters, the
+# existence of which force quoting of the parameter value.
+tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
+
+def _formatparam(param, value=None, quote=1):
+    """Convenience function to format and return a key=value pair.
+
+    This will quote the value if needed or if quote is true. With no value
+    (or an empty one), just the bare parameter name is returned.
+    """
+    if value is not None and len(value) > 0:
+        if quote or tspecials.search(value):
+            # Escape backslashes and double quotes inside the quoted value.
+            value = value.replace('\\', '\\\\').replace('"', r'\"')
+            return '%s="%s"' % (param, value)
+        else:
+            return '%s=%s' % (param, value)
+    else:
+        return param
+
+class Headers(object):
+    """Manage a collection of HTTP response headers.
+
+    Backed by a list of (name, value) tuples so duplicates and insertion
+    order are preserved; name matching is case-insensitive throughout.
+    """
+    def __init__(self,headers):
+        if not isinstance(headers, list):
+            raise TypeError("Headers must be a list of name/value tuples")
+        self._headers = headers
+
+    def __len__(self):
+        """Return the total number of headers, including duplicates."""
+        return len(self._headers)
+
+    def __setitem__(self, name, val):
+        """Set the value of a header, replacing any existing occurrences."""
+        del self[name]
+        self._headers.append((name, val))
+
+    def __delitem__(self,name):
+        """Delete all occurrences of a header, if present.
+
+        Does *not* raise an exception if the header is missing.
+        """
+        name = name.lower()
+        # NOTE: '<>' is the legacy Python 2 spelling of '!='.
+        self._headers[:] = [kv for kv in self._headers if kv[0].lower()<>name]
+
+    def __getitem__(self,name):
+        """Get the first header value for 'name'.
+
+        Return None if the header is missing instead of raising an exception.
+
+        Note that if the header appeared multiple times, exactly which
+        occurrence gets returned is undefined. Use get_all() to get all
+        the values matching a header field name.
+        """
+        return self.get(name)
+
+    def has_key(self, name):
+        """Return true if the message contains the header."""
+        return self.get(name) is not None
+
+    __contains__ = has_key
+
+    def get_all(self, name):
+        """Return a list of all the values for the named field.
+
+        These will be returned in the order they appeared in the original
+        header list or were added to this instance, and may contain
+        duplicates. Any fields deleted and re-inserted are always appended
+        to the header list. If no fields exist with the given name, returns
+        an empty list.
+        """
+        name = name.lower()
+        return [kv[1] for kv in self._headers if kv[0].lower()==name]
+
+
+    def get(self,name,default=None):
+        """Get the first header value for 'name', or return 'default'."""
+        name = name.lower()
+        for k,v in self._headers:
+            if k.lower()==name:
+                return v
+        return default
+
+    def keys(self):
+        """Return a list of all the header field names.
+
+        These will be in the order they appeared in the original header
+        list, or were added to this instance, and may contain duplicates.
+        Any fields deleted and re-inserted are always appended to the header
+        list.
+        """
+        return [k for k, v in self._headers]
+
+    def values(self):
+        """Return a list of all header values.
+
+        These will be in the order they appeared in the original header
+        list, or were added to this instance, and may contain duplicates.
+        Any fields deleted and re-inserted are always appended to the header
+        list.
+        """
+        return [v for k, v in self._headers]
+
+    def items(self):
+        """Get all the header fields and values as (name, value) tuples.
+
+        These will be in the order they were in the original header
+        list, or were added to this instance, and may contain duplicates.
+        Any fields deleted and re-inserted are always appended to the header
+        list.
+        """
+        return self._headers[:]
+
+    def __repr__(self):
+        # Backquotes are the legacy Python 2 spelling of repr().
+        return "Headers(%s)" % `self._headers`
+
+    def __str__(self):
+        """str() returns the formatted headers, complete with end line,
+        suitable for direct HTTP transmission."""
+        # The two trailing '' entries produce the final blank CRLF line that
+        # terminates the header section.
+        return '\r\n'.join(["%s: %s" % kv for kv in self._headers]+['',''])
+
+    def setdefault(self,name,value):
+        """Return first matching header value for 'name', or 'value'.
+
+        If there is no header named 'name', add a new header with name 'name'
+        and value 'value'."""
+        result = self.get(name)
+        if result is None:
+            self._headers.append((name,value))
+            return value
+        else:
+            return result
+
+    def add_header(self, _name, _value, **_params):
+        """Extended header setting.
+
+        _name is the header field to add. keyword arguments can be used to set
+        additional parameters for the header field, with underscores converted
+        to dashes. Normally the parameter will be added as key="value" unless
+        value is None, in which case only the key will be added.
+
+        Example:
+
+        h.add_header('content-disposition', 'attachment', filename='bud.gif')
+
+        Note that unlike the corresponding 'email.Message' method, this does
+        *not* handle '(charset, language, value)' tuples: all values must be
+        strings or None.
+        """
+        parts = []
+        if _value is not None:
+            parts.append(_value)
+        for k, v in _params.items():
+            if v is None:
+                parts.append(k.replace('_', '-'))
+            else:
+                parts.append(_formatparam(k.replace('_', '-'), v))
+        self._headers.append((_name, "; ".join(parts)))
+
+def guess_scheme(environ):
+    """Return a guess for whether 'wsgi.url_scheme' should be 'http' or 'https'.
+
+    Based solely on the CGI-style HTTPS environment variable.
+    """
+    if environ.get("HTTPS") in ('yes','on','1'):
+        return 'https'
+    else:
+        return 'http'
+
+# HTTP/1.1 hop-by-hop headers (connection-level, must not be forwarded by
+# proxies); used as a set via key lookup.
+_hop_headers = {
+    'connection':1, 'keep-alive':1, 'proxy-authenticate':1,
+    'proxy-authorization':1, 'te':1, 'trailers':1, 'transfer-encoding':1,
+    'upgrade':1
+}
+
+def is_hop_by_hop(header_name):
+    """Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header."""
+    return header_name.lower() in _hop_headers
+
+class ServerHandler(object):
+    """Manage the invocation of a WSGI application.
+
+    Drives one request/response cycle: builds the WSGI environ, calls the
+    application, streams the result to the client, and handles errors.
+    Adapted from wsgiref; I/O goes through the stdin/stdout/stderr streams
+    supplied to __init__.
+    """
+
+    # Configuration parameters; can override per-subclass or per-instance
+    wsgi_version = (1,0)
+    wsgi_multithread = True
+    wsgi_multiprocess = True
+    wsgi_run_once = False
+
+    origin_server = True    # We are transmitting direct to client
+    http_version = "1.0"    # Version that should be used for response
+    server_software = software_version
+
+    # os_environ is used to supply configuration from the OS environment:
+    # by default it's a copy of 'os.environ' as of import time, but you can
+    # override this in e.g. your __init__ method.
+    os_environ = dict(os.environ.items())
+
+    # Collaborator classes
+    wsgi_file_wrapper = FileWrapper     # set to None to disable
+    headers_class = Headers             # must be a Headers-like class
+
+    # Error handling (also per-subclass or per-instance)
+    traceback_limit = None  # Print entire traceback to self.get_stderr()
+    error_status = "500 INTERNAL SERVER ERROR"
+    error_headers = [('Content-Type','text/plain')]
+
+    # State variables (don't mess with these)
+    status = result = None
+    headers_sent = False
+    headers = None
+    bytes_sent = 0
+
+    def __init__(self, stdin, stdout, stderr, environ, multithread=True,
+                 multiprocess=False):
+        self.stdin = stdin
+        self.stdout = stdout
+        self.stderr = stderr
+        self.base_env = environ
+        self.wsgi_multithread = multithread
+        self.wsgi_multiprocess = multiprocess
+
+    def run(self, application):
+        """Invoke the application"""
+        # Note to self: don't move the close()! Asynchronous servers shouldn't
+        # call close() from finish_response(), so if you close() anywhere but
+        # the double-error branch here, you'll break asynchronous servers by
+        # prematurely closing. Async servers must return from 'run()' without
+        # closing if there might still be output to iterate over.
+        try:
+            self.setup_environ()
+            self.result = application(self.environ, self.start_response)
+            self.finish_response()
+        except:
+            try:
+                self.handle_error()
+            except:
+                # If we get an error handling an error, just give up already!
+                self.close()
+                raise # ...and let the actual server figure it out.
+
+    def setup_environ(self):
+        """Set up the environment for one request"""
+
+        # Start from a copy of the OS environment, then overlay the
+        # per-request CGI variables and the wsgi.* keys.
+        env = self.environ = self.os_environ.copy()
+        self.add_cgi_vars()
+
+        env['wsgi.input'] = self.get_stdin()
+        env['wsgi.errors'] = self.get_stderr()
+        env['wsgi.version'] = self.wsgi_version
+        env['wsgi.run_once'] = self.wsgi_run_once
+        env['wsgi.url_scheme'] = self.get_scheme()
+        env['wsgi.multithread'] = self.wsgi_multithread
+        env['wsgi.multiprocess'] = self.wsgi_multiprocess
+
+        if self.wsgi_file_wrapper is not None:
+            env['wsgi.file_wrapper'] = self.wsgi_file_wrapper
+
+        if self.origin_server and self.server_software:
+            env.setdefault('SERVER_SOFTWARE',self.server_software)
+
+    def finish_response(self):
+        """
+        Send any iterable data, then close self and the iterable
+
+        Subclasses intended for use in asynchronous servers will want to
+        redefine this method, such that it sets up callbacks in the event loop
+        to iterate over the data, and to call 'self.close()' once the response
+        is finished.
+        """
+        if not self.result_is_file() or not self.sendfile():
+            for data in self.result:
+                self.write(data)
+            self.finish_content()
+        self.close()
+
+    def get_scheme(self):
+        """Return the URL scheme being used"""
+        return guess_scheme(self.environ)
+
+    def set_content_length(self):
+        """Compute Content-Length or switch to chunked encoding if possible"""
+        try:
+            blocks = len(self.result)
+        except (TypeError, AttributeError, NotImplementedError):
+            pass
+        else:
+            # Only a single-block result is safe to measure up front, since
+            # write() has already counted its bytes.
+            if blocks==1:
+                self.headers['Content-Length'] = str(self.bytes_sent)
+                return
+        # XXX Try for chunked encoding if origin server and client is 1.1
+
+    def cleanup_headers(self):
+        """Make any necessary header changes or defaults
+
+        Subclasses can extend this to add other defaults.
+        """
+        if 'Content-Length' not in self.headers:
+            self.set_content_length()
+
+    def start_response(self, status, headers,exc_info=None):
+        """'start_response()' callable as specified by PEP 333"""
+
+        if exc_info:
+            try:
+                if self.headers_sent:
+                    # Re-raise original exception if headers sent
+                    raise exc_info[0], exc_info[1], exc_info[2]
+            finally:
+                exc_info = None # avoid dangling circular ref
+        elif self.headers is not None:
+            raise AssertionError("Headers already set!")
+
+        assert isinstance(status, str),"Status must be a string"
+        assert len(status)>=4,"Status must be at least 4 characters"
+        assert int(status[:3]),"Status message must begin w/3-digit code"
+        assert status[3]==" ", "Status message must have a space after code"
+        if __debug__:
+            for name,val in headers:
+                assert isinstance(name, str),"Header names must be strings"
+                assert isinstance(val, str),"Header values must be strings"
+                assert not is_hop_by_hop(name),"Hop-by-hop headers not allowed"
+        self.status = status
+        self.headers = self.headers_class(headers)
+        return self.write
+
+    def send_preamble(self):
+        """Transmit version/status/date/server, via self._write()"""
+        if self.origin_server:
+            if self.client_is_modern():
+                self._write('HTTP/%s %s\r\n' % (self.http_version,self.status))
+                if 'Date' not in self.headers:
+                    self._write(
+                        'Date: %s\r\n' % http_date()
+                    )
+                if self.server_software and 'Server' not in self.headers:
+                    self._write('Server: %s\r\n' % self.server_software)
+        else:
+            # Gateway (CGI-style) mode: emit a Status header instead of a
+            # full HTTP status line.
+            self._write('Status: %s\r\n' % self.status)
+
+    def write(self, data):
+        """'write()' callable as specified by PEP 333"""
+
+        assert isinstance(data, str), "write() argument must be string"
+
+        if not self.status:
+            raise AssertionError("write() before start_response()")
+
+        elif not self.headers_sent:
+            # Before the first output, send the stored headers
+            self.bytes_sent = len(data) # make sure we know content-length
+            self.send_headers()
+        else:
+            self.bytes_sent += len(data)
+
+        # XXX check Content-Length and truncate if too many bytes written?
+
+        # If data is too large, socket will choke, so write chunks no larger
+        # than 32MB at a time.
+        length = len(data)
+        if length > 33554432:
+            offset = 0
+            while offset < length:
+                # Slicing clamps at the end, so the final chunk is just short.
+                chunk_size = min(33554432, length)
+                self._write(data[offset:offset+chunk_size])
+                self._flush()
+                offset += chunk_size
+        else:
+            self._write(data)
+            self._flush()
+
+    def sendfile(self):
+        """Platform-specific file transmission
+
+        Override this method in subclasses to support platform-specific
+        file transmission. It is only called if the application's
+        return iterable ('self.result') is an instance of
+        'self.wsgi_file_wrapper'.
+
+        This method should return a true value if it was able to actually
+        transmit the wrapped file-like object using a platform-specific
+        approach. It should return a false value if normal iteration
+        should be used instead. An exception can be raised to indicate
+        that transmission was attempted, but failed.
+
+        NOTE: this method should call 'self.send_headers()' if
+        'self.headers_sent' is false and it is going to attempt direct
+        transmission of the file.
+        """
+        return False # No platform-specific transmission by default
+
+    def finish_content(self):
+        """Ensure headers and content have both been sent"""
+        if not self.headers_sent:
+            # Nothing was written at all: send an explicitly empty body.
+            self.headers['Content-Length'] = "0"
+            self.send_headers()
+        else:
+            pass # XXX check if content-length was too short?
+
+    def close(self):
+        """Log the request and reset all per-request state.
+
+        Uses the 'request_handler' backpointer (set by the request handler
+        after construction) for logging, and closes the result iterable if
+        it supports close().
+        """
+        try:
+            self.request_handler.log_request(self.status.split(' ',1)[0], self.bytes_sent)
+        finally:
+            try:
+                if hasattr(self.result,'close'):
+                    self.result.close()
+            finally:
+                self.result = self.headers = self.status = self.environ = None
+                self.bytes_sent = 0; self.headers_sent = False
+
+    def send_headers(self):
+        """Transmit headers to the client, via self._write()"""
+        self.cleanup_headers()
+        self.headers_sent = True
+        if not self.origin_server or self.client_is_modern():
+            self.send_preamble()
+            self._write(str(self.headers))
+
+    def result_is_file(self):
+        """True if 'self.result' is an instance of 'self.wsgi_file_wrapper'"""
+        wrapper = self.wsgi_file_wrapper
+        return wrapper is not None and isinstance(self.result,wrapper)
+
+    def client_is_modern(self):
+        """True if client can accept status and headers"""
+        return self.environ['SERVER_PROTOCOL'].upper() != 'HTTP/0.9'
+
+    def log_exception(self,exc_info):
+        """Log the 'exc_info' tuple in the server log
+
+        Subclasses may override to retarget the output or change its format.
+        """
+        try:
+            from traceback import print_exception
+            stderr = self.get_stderr()
+            print_exception(
+                exc_info[0], exc_info[1], exc_info[2],
+                self.traceback_limit, stderr
+            )
+            stderr.flush()
+        finally:
+            # Drop the traceback reference to avoid a circular reference.
+            exc_info = None
+
+    def handle_error(self):
+        """Log current error, and send error output to client if possible"""
+        self.log_exception(sys.exc_info())
+        if not self.headers_sent:
+            self.result = self.error_output(self.environ, self.start_response)
+            self.finish_response()
+        # XXX else: attempt advanced recovery techniques for HTML or text?
+
+    def error_output(self, environ, start_response):
+        # Debug-server behavior: the response body is the full traceback.
+        import traceback
+        start_response(self.error_status, self.error_headers[:], sys.exc_info())
+        return ['\n'.join(traceback.format_exception(*sys.exc_info()))]
+
+    # In wsgiref these are abstract hooks; this adaptation implements them
+    # directly in terms of the streams passed to __init__.
+
+    def _write(self,data):
+        self.stdout.write(data)
+        # Cache the bound method so later calls skip this wrapper entirely.
+        self._write = self.stdout.write
+
+    def _flush(self):
+        self.stdout.flush()
+        # Same memoization trick as _write.
+        self._flush = self.stdout.flush
+
+    def get_stdin(self):
+        return self.stdin
+
+    def get_stderr(self):
+        return self.stderr
+
+    def add_cgi_vars(self):
+        # Overlay the per-request CGI variables supplied by the server.
+        self.environ.update(self.base_env)
+
+class WSGIServer(HTTPServer):
+    """BaseHTTPServer that implements the Python WSGI protocol"""
+    # The WSGI application callable; set via set_app() before serving.
+    application = None
+
+    def server_bind(self):
+        """Override server_bind to store the server name."""
+        try:
+            HTTPServer.server_bind(self)
+        except Exception, e:
+            # Re-wrap bind failures (e.g. port already in use) in a single
+            # server-specific exception type for callers to catch.
+            raise WSGIServerException(e)
+        self.setup_environ()
+
+    def setup_environ(self):
+        # Set up base environment shared by every request; per-request keys
+        # are layered on top by WSGIRequestHandler.get_environ().
+        env = self.base_environ = {}
+        env['SERVER_NAME'] = self.server_name
+        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
+        env['SERVER_PORT'] = str(self.server_port)
+        env['REMOTE_HOST']=''
+        env['CONTENT_LENGTH']=''
+        env['SCRIPT_NAME'] = ''
+
+    def get_app(self):
+        return self.application
+
+    def set_app(self,application):
+        self.application = application
+
+class WSGIRequestHandler(BaseHTTPRequestHandler):
+    """Request handler that builds a WSGI environ per request and hands it,
+    together with a ServerHandler, to the server's WSGI application."""
+
+    server_version = "WSGIServer/" + __version__
+
+    def __init__(self, *args, **kwargs):
+        from google.appengine._internal.django.conf import settings
+        self.admin_media_prefix = settings.ADMIN_MEDIA_PREFIX
+        # We set self.path to avoid crashes in log_message() on unsupported
+        # requests (like "OPTIONS").
+        self.path = ''
+        self.style = color_style()
+        BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
+
+    def get_environ(self):
+        """Build the per-request CGI/WSGI environ dict from the parsed
+        request line and headers, layered on the server's base environ."""
+        env = self.server.base_environ.copy()
+        env['SERVER_PROTOCOL'] = self.request_version
+        env['REQUEST_METHOD'] = self.command
+        if '?' in self.path:
+            path,query = self.path.split('?',1)
+        else:
+            path,query = self.path,''
+
+        env['PATH_INFO'] = urllib.unquote(path)
+        env['QUERY_STRING'] = query
+        env['REMOTE_ADDR'] = self.client_address[0]
+
+        if self.headers.typeheader is None:
+            env['CONTENT_TYPE'] = self.headers.type
+        else:
+            env['CONTENT_TYPE'] = self.headers.typeheader
+
+        length = self.headers.getheader('content-length')
+        if length:
+            env['CONTENT_LENGTH'] = length
+
+        # Remaining headers become HTTP_* variables.
+        for h in self.headers.headers:
+            k,v = h.split(':',1)
+            k=k.replace('-','_').upper(); v=v.strip()
+            if k in env:
+                continue # skip content length, type,etc.
+            if 'HTTP_'+k in env:
+                env['HTTP_'+k] += ','+v # comma-separate multiple headers
+            else:
+                env['HTTP_'+k] = v
+        return env
+
+    def get_stderr(self):
+        return sys.stderr
+
+    def handle(self):
+        """Handle a single HTTP request"""
+        self.raw_requestline = self.rfile.readline()
+        if not self.parse_request(): # An error code has been sent, just exit
+            return
+        handler = ServerHandler(self.rfile, self.wfile, self.get_stderr(), self.get_environ())
+        handler.request_handler = self      # backpointer for logging
+        handler.run(self.server.get_app())
+
+    def log_message(self, format, *args):
+        # Don't bother logging requests for admin images or the favicon.
+        if self.path.startswith(self.admin_media_prefix) or self.path == '/favicon.ico':
+            return
+
+        msg = "[%s] %s\n" % (self.log_date_time_string(), format % args)
+
+        # Utilize terminal colors, if available. args[1] is the status code
+        # string as passed by BaseHTTPRequestHandler.log_request().
+        if args[1][0] == '2':
+            # Put 2XX first, since it should be the common case
+            msg = self.style.HTTP_SUCCESS(msg)
+        elif args[1][0] == '1':
+            msg = self.style.HTTP_INFO(msg)
+        elif args[1] == '304':
+            msg = self.style.HTTP_NOT_MODIFIED(msg)
+        elif args[1][0] == '3':
+            msg = self.style.HTTP_REDIRECT(msg)
+        elif args[1] == '404':
+            msg = self.style.HTTP_NOT_FOUND(msg)
+        elif args[1][0] == '4':
+            msg = self.style.HTTP_BAD_REQUEST(msg)
+        else:
+            # Any 5XX, or any other response
+            msg = self.style.HTTP_SERVER_ERROR(msg)
+
+        sys.stderr.write(msg)
+
+class AdminMediaHandler(object):
+    """
+    WSGI middleware that intercepts calls to the admin media directory, as
+    defined by the ADMIN_MEDIA_PREFIX setting, and serves those images.
+    Use this ONLY LOCALLY, for development! This hasn't been tested for
+    security and is not super efficient.
+    """
+    def __init__(self, application, media_dir=None):
+        from google.appengine._internal.django.conf import settings
+        self.application = application
+        if not media_dir:
+            # Default to the media shipped inside the installed django package.
+            import django
+            self.media_dir = os.path.join(django.__path__[0], 'contrib', 'admin', 'media')
+        else:
+            self.media_dir = media_dir
+        self.media_url = settings.ADMIN_MEDIA_PREFIX
+
+    def file_path(self, url):
+        """
+        Returns the path to the media file on disk for the given URL.
+
+        The passed URL is assumed to begin with ADMIN_MEDIA_PREFIX. If the
+        resultant file path is outside the media directory, then a ValueError
+        is raised (by safe_join).
+        """
+        # Remove ADMIN_MEDIA_PREFIX.
+        relative_url = url[len(self.media_url):]
+        relative_path = urllib.url2pathname(relative_url)
+        return safe_join(self.media_dir, relative_path)
+
+    def __call__(self, environ, start_response):
+        import os.path
+
+        # Ignore requests that aren't under ADMIN_MEDIA_PREFIX. Also ignore
+        # all requests if ADMIN_MEDIA_PREFIX isn't a relative URL.
+        if self.media_url.startswith('http://') or self.media_url.startswith('https://') or not environ['PATH_INFO'].startswith(self.media_url):
+            return self.application(environ, start_response)
+
+        # Find the admin file and serve it up, if it exists and is readable.
+        try:
+            file_path = self.file_path(environ['PATH_INFO'])
+        except ValueError: # Resulting file path was not valid.
+            status = '404 NOT FOUND'
+            headers = {'Content-type': 'text/plain'}
+            output = ['Page not found: %s' % environ['PATH_INFO']]
+            start_response(status, headers.items())
+            return output
+        if not os.path.exists(file_path):
+            status = '404 NOT FOUND'
+            headers = {'Content-type': 'text/plain'}
+            output = ['Page not found: %s' % environ['PATH_INFO']]
+        else:
+            try:
+                fp = open(file_path, 'rb')
+            except IOError:
+                status = '401 UNAUTHORIZED'
+                headers = {'Content-type': 'text/plain'}
+                output = ['Permission denied: %s' % environ['PATH_INFO']]
+            else:
+                # This is a very simple implementation of conditional GET with
+                # the Last-Modified header. It makes media files a bit speedier
+                # because the files are only read off disk for the first
+                # request (assuming the browser/client supports conditional
+                # GET).
+                mtime = http_date(os.stat(file_path)[stat.ST_MTIME])
+                headers = {'Last-Modified': mtime}
+                if environ.get('HTTP_IF_MODIFIED_SINCE', None) == mtime:
+                    status = '304 NOT MODIFIED'
+                    output = []
+                else:
+                    status = '200 OK'
+                    mime_type = mimetypes.guess_type(file_path)[0]
+                    if mime_type:
+                        headers['Content-Type'] = mime_type
+                    output = [fp.read()]
+                fp.close()
+        start_response(status, headers.items())
+        return output
+
+def run(addr, port, wsgi_handler):
+    """Serve 'wsgi_handler' forever on (addr, port) with the dev WSGIServer."""
+    server_address = (addr, port)
+    httpd = WSGIServer(server_address, WSGIRequestHandler)
+    httpd.set_app(wsgi_handler)
+    httpd.serve_forever()
diff --git a/google/appengine/_internal/django/core/servers/fastcgi.py b/google/appengine/_internal/django/core/servers/fastcgi.py
new file mode 100644
index 0000000..b2d934b
--- /dev/null
+++ b/google/appengine/_internal/django/core/servers/fastcgi.py
@@ -0,0 +1,183 @@
+"""
+FastCGI (or SCGI, or AJP1.3 ...) server that implements the WSGI protocol.
+
+Uses the flup python package: http://www.saddi.com/software/flup/
+
+This is an adaptation of the flup package to add FastCGI server support
+to run Django apps from Web servers that support the FastCGI protocol.
+This module can be run standalone or from the django-admin / manage.py
+scripts using the "runfcgi" directive.
+
+Run with the extra option "help" for a list of additional options you can
+pass to this server.
+"""
+
+from google.appengine._internal.django.utils import importlib
+import sys, os
+
+__version__ = "0.1"
+__all__ = ["runfastcgi"]
+
+FASTCGI_HELP = r"""
+ Run this project as a fastcgi (or some other protocol supported
+ by flup) application. To do this, the flup package from
+ http://www.saddi.com/software/flup/ is required.
+
+ runfcgi [options] [fcgi settings]
+
+Optional Fcgi settings: (setting=value)
+ protocol=PROTOCOL fcgi, scgi, ajp, ... (default fcgi)
+ host=HOSTNAME hostname to listen on.
+ port=PORTNUM port to listen on.
+ socket=FILE UNIX socket to listen on.
+ method=IMPL prefork or threaded (default prefork).
+ maxrequests=NUMBER number of requests a child handles before it is
+ killed and a new child is forked (0 = no limit).
+ maxspare=NUMBER max number of spare processes / threads.
+ minspare=NUMBER min number of spare processes / threads.
+ maxchildren=NUMBER hard limit number of processes / threads.
+ daemonize=BOOL whether to detach from terminal.
+ pidfile=FILE write the spawned process-id to this file.
+ workdir=DIRECTORY change to this directory when daemonizing.
+ debug=BOOL set to true to enable flup tracebacks.
+ outlog=FILE write stdout to this file.
+ errlog=FILE write stderr to this file.
+ umask=UMASK umask to use when daemonizing, in octal notation (default 022).
+
+Examples:
+ Run a "standard" fastcgi process on a file-descriptor
+ (for Web servers which spawn your processes for you)
+ $ manage.py runfcgi method=threaded
+
+ Run a scgi server on a TCP host/port
+ $ manage.py runfcgi protocol=scgi method=prefork host=127.0.0.1 port=8025
+
+ Run a fastcgi server on a UNIX domain socket (posix platforms only)
+ $ manage.py runfcgi method=prefork socket=/tmp/fcgi.sock
+
+ Run a fastCGI as a daemon and write the spawned PID in a file
+ $ manage.py runfcgi socket=/tmp/fcgi.sock method=prefork \
+ daemonize=true pidfile=/var/run/django-fcgi.pid
+
+"""
+
+# Default values for every recognized runfcgi option; runfastcgi() copies
+# this dict and overlays command-line "key=value" settings on top of it.
+FASTCGI_OPTIONS = {
+    'protocol': 'fcgi',
+    'host': None,
+    'port': None,
+    'socket': None,
+    'method': 'fork',
+    'daemonize': None,
+    'workdir': '/',
+    'pidfile': None,
+    'maxspare': 5,
+    'minspare': 2,
+    'maxchildren': 50,
+    'maxrequests': 0,
+    'debug': None,
+    'outlog': None,
+    'errlog': None,
+    'umask': None,
+}
+
+def fastcgi_help(message=None):
+    """Print the usage text (plus an optional message) and return False."""
+    print FASTCGI_HELP
+    if message:
+        print message
+    return False
+
+def runfastcgi(argset=[], **kwargs):
+    """Run the Django WSGI handler under a flup FastCGI/SCGI/AJP server.
+
+    'argset' is a list of "key=value" strings (see FASTCGI_HELP); 'kwargs'
+    overrides FASTCGI_OPTIONS directly. Returns False on error.
+
+    NOTE(review): 'argset=[]' is a mutable default argument; it is only
+    read here, never mutated, so the usual aliasing hazard does not bite.
+    """
+    options = FASTCGI_OPTIONS.copy()
+    options.update(kwargs)
+    # Parse "key=value" settings; a bare "key" means True.
+    for x in argset:
+        if "=" in x:
+            k, v = x.split('=', 1)
+        else:
+            k, v = x, True
+        options[k.lower()] = v
+
+    if "help" in options:
+        return fastcgi_help()
+
+    try:
+        import flup
+    except ImportError, e:
+        print >> sys.stderr, "ERROR: %s" % e
+        print >> sys.stderr, "  Unable to load the flup package.  In order to run django"
+        print >> sys.stderr, "  as a FastCGI application, you will need to get flup from"
+        print >> sys.stderr, "  http://www.saddi.com/software/flup/    If you've already"
+        print >> sys.stderr, "  installed flup, then make sure you have it in your PYTHONPATH."
+        return False
+
+    flup_module = 'server.' + options['protocol']
+
+    # Choose the forking or threaded flup server variant and its options.
+    if options['method'] in ('prefork', 'fork'):
+        wsgi_opts = {
+            'maxSpare': int(options["maxspare"]),
+            'minSpare': int(options["minspare"]),
+            'maxChildren': int(options["maxchildren"]),
+            'maxRequests': int(options["maxrequests"]),
+        }
+        flup_module += '_fork'
+    elif options['method'] in ('thread', 'threaded'):
+        wsgi_opts = {
+            'maxSpare': int(options["maxspare"]),
+            'minSpare': int(options["minspare"]),
+            'maxThreads': int(options["maxchildren"]),
+        }
+    else:
+        return fastcgi_help("ERROR: Implementation must be one of prefork or thread.")
+
+    wsgi_opts['debug'] = options['debug'] is not None
+
+    try:
+        module = importlib.import_module('.%s' % flup_module, 'flup')
+        WSGIServer = module.WSGIServer
+    except:
+        # NOTE(review): bare except hides the underlying import error;
+        # narrowing to ImportError would give users a better diagnostic.
+        print "Can't import flup." + flup_module
+        return False
+
+    # Prep up and go
+    from google.appengine._internal.django.core.handlers.wsgi import WSGIHandler
+
+    # host+port, socket, and neither (inherited fd) are the only valid
+    # address combinations.
+    if options["host"] and options["port"] and not options["socket"]:
+        wsgi_opts['bindAddress'] = (options["host"], int(options["port"]))
+    elif options["socket"] and not options["host"] and not options["port"]:
+        wsgi_opts['bindAddress'] = options["socket"]
+    elif not options["socket"] and not options["host"] and not options["port"]:
+        wsgi_opts['bindAddress'] = None
+    else:
+        return fastcgi_help("Invalid combination of host, port, socket.")
+
+    if options["daemonize"] is None:
+        # Default to daemonizing if we're running on a socket/named pipe.
+        daemonize = (wsgi_opts['bindAddress'] is not None)
+    else:
+        if options["daemonize"].lower() in ('true', 'yes', 't'):
+            daemonize = True
+        elif options["daemonize"].lower() in ('false', 'no', 'f'):
+            daemonize = False
+        else:
+            return fastcgi_help("ERROR: Invalid option for daemonize parameter.")
+
+    daemon_kwargs = {}
+    if options['outlog']:
+        daemon_kwargs['out_log'] = options['outlog']
+    if options['errlog']:
+        daemon_kwargs['err_log'] = options['errlog']
+    if options['umask']:
+        # umask is given in octal notation, e.g. "022".
+        daemon_kwargs['umask'] = int(options['umask'], 8)
+
+    if daemonize:
+        from google.appengine._internal.django.utils.daemonize import become_daemon
+        become_daemon(our_home_dir=options["workdir"], **daemon_kwargs)
+
+    if options["pidfile"]:
+        # Record the (possibly daemonized) server's PID for init scripts.
+        fp = open(options["pidfile"], "w")
+        fp.write("%d\n" % os.getpid())
+        fp.close()
+
+    WSGIServer(WSGIHandler(), **wsgi_opts).run()
+
+if __name__ == '__main__':
+    # Allow running this module directly; CLI args become option settings.
+    runfastcgi(sys.argv[1:])
diff --git a/google/appengine/_internal/django/core/signals.py b/google/appengine/_internal/django/core/signals.py
new file mode 100644
index 0000000..06c063c
--- /dev/null
+++ b/google/appengine/_internal/django/core/signals.py
@@ -0,0 +1,5 @@
+from google.appengine._internal.django.dispatch import Signal
+
+# Sent at the start and end of each request/response cycle.
+request_started = Signal()
+request_finished = Signal()
+# Sent when an unhandled exception escapes request processing; receivers
+# get the offending request via the "request" argument.
+got_request_exception = Signal(providing_args=["request"])
diff --git a/google/appengine/_internal/django/core/template_loader.py b/google/appengine/_internal/django/core/template_loader.py
new file mode 100644
index 0000000..4d719ae
--- /dev/null
+++ b/google/appengine/_internal/django/core/template_loader.py
@@ -0,0 +1,7 @@
+# This module is DEPRECATED!
+#
+# You should no longer be using django.template_loader.
+#
+# Use django.template.loader instead.
+
+from google.appengine._internal.django.template.loader import *
diff --git a/google/appengine/_internal/django/core/urlresolvers.py b/google/appengine/_internal/django/core/urlresolvers.py
new file mode 100644
index 0000000..f0c280e
--- /dev/null
+++ b/google/appengine/_internal/django/core/urlresolvers.py
@@ -0,0 +1,396 @@
+"""
+This module converts requested URLs to callback view functions.
+
+RegexURLResolver is the main class here. Its resolve() method takes a URL (as
+a string) and returns a tuple in this format:
+
+ (view_function, function_args, function_kwargs)
+"""
+
+import re
+
+from google.appengine._internal.django.http import Http404
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
+from google.appengine._internal.django.utils.datastructures import MultiValueDict
+from google.appengine._internal.django.utils.encoding import iri_to_uri, force_unicode, smart_str
+from google.appengine._internal.django.utils.functional import memoize
+from google.appengine._internal.django.utils.importlib import import_module
+from google.appengine._internal.django.utils.regex_helper import normalize
+from google.appengine._internal.django.utils.thread_support import currentThread
+
+_resolver_cache = {} # Maps URLconf modules to RegexURLResolver instances.
+_callable_cache = {} # Maps view and url pattern names to their view functions.
+
+# SCRIPT_NAME prefixes for each thread are stored here. If there's no entry for
+# the current thread (which is the only one we ever access), it is assumed to
+# be empty.
+_prefixes = {}
+
+# Overridden URLconfs for each thread are stored here.
+_urlconfs = {}
+
+class Resolver404(Http404):
+ pass
+
+class NoReverseMatch(Exception):
+ # Don't make this raise an error when used in a template.
+ silent_variable_failure = True
+
+def get_callable(lookup_view, can_fail=False):
+ """
+ Convert a string version of a function name to the callable object.
+
+ If the lookup_view is not an import path, it is assumed to be a URL pattern
+ label and the original string is returned.
+
+ If can_fail is True, lookup_view might be a URL pattern label, so errors
+ during the import fail and the string is returned.
+ """
+ if not callable(lookup_view):
+ try:
+ # Bail early for non-ASCII strings (they can't be functions).
+ lookup_view = lookup_view.encode('ascii')
+ mod_name, func_name = get_mod_func(lookup_view)
+ if func_name != '':
+ lookup_view = getattr(import_module(mod_name), func_name)
+ if not callable(lookup_view):
+ raise AttributeError("'%s.%s' is not a callable." % (mod_name, func_name))
+ except (ImportError, AttributeError):
+ if not can_fail:
+ raise
+ except UnicodeEncodeError:
+ pass
+ return lookup_view
+get_callable = memoize(get_callable, _callable_cache, 1)
+
+def get_resolver(urlconf):
+ if urlconf is None:
+ from google.appengine._internal.django.conf import settings
+ urlconf = settings.ROOT_URLCONF
+ return RegexURLResolver(r'^/', urlconf)
+get_resolver = memoize(get_resolver, _resolver_cache, 1)
+
+def get_mod_func(callback):
+ # Converts 'django.views.news.stories.story_detail' to
+ # ['django.views.news.stories', 'story_detail']
+ try:
+ dot = callback.rindex('.')
+ except ValueError:
+ return callback, ''
+ return callback[:dot], callback[dot+1:]
+
+class RegexURLPattern(object):
+ def __init__(self, regex, callback, default_args=None, name=None):
+ # regex is a string representing a regular expression.
+ # callback is either a string like 'foo.views.news.stories.story_detail'
+ # which represents the path to a module and a view function name, or a
+ # callable object (view).
+ self.regex = re.compile(regex, re.UNICODE)
+ if callable(callback):
+ self._callback = callback
+ else:
+ self._callback = None
+ self._callback_str = callback
+ self.default_args = default_args or {}
+ self.name = name
+
+ def __repr__(self):
+ return '<%s %s %s>' % (self.__class__.__name__, self.name, self.regex.pattern)
+
+ def add_prefix(self, prefix):
+ """
+ Adds the prefix string to a string-based callback.
+ """
+ if not prefix or not hasattr(self, '_callback_str'):
+ return
+ self._callback_str = prefix + '.' + self._callback_str
+
+ def resolve(self, path):
+ match = self.regex.search(path)
+ if match:
+ # If there are any named groups, use those as kwargs, ignoring
+ # non-named groups. Otherwise, pass all non-named arguments as
+ # positional arguments.
+ kwargs = match.groupdict()
+ if kwargs:
+ args = ()
+ else:
+ args = match.groups()
+ # In both cases, pass any extra_kwargs as **kwargs.
+ kwargs.update(self.default_args)
+
+ return self.callback, args, kwargs
+
+ def _get_callback(self):
+ if self._callback is not None:
+ return self._callback
+ try:
+ self._callback = get_callable(self._callback_str)
+ except ImportError, e:
+ mod_name, _ = get_mod_func(self._callback_str)
+ raise ViewDoesNotExist("Could not import %s. Error was: %s" % (mod_name, str(e)))
+ except AttributeError, e:
+ mod_name, func_name = get_mod_func(self._callback_str)
+ raise ViewDoesNotExist("Tried %s in module %s. Error was: %s" % (func_name, mod_name, str(e)))
+ return self._callback
+ callback = property(_get_callback)
+
+class RegexURLResolver(object):
+ def __init__(self, regex, urlconf_name, default_kwargs=None, app_name=None, namespace=None):
+ # regex is a string representing a regular expression.
+ # urlconf_name is a string representing the module containing URLconfs.
+ self.regex = re.compile(regex, re.UNICODE)
+ self.urlconf_name = urlconf_name
+ if not isinstance(urlconf_name, basestring):
+ self._urlconf_module = self.urlconf_name
+ self.callback = None
+ self.default_kwargs = default_kwargs or {}
+ self.namespace = namespace
+ self.app_name = app_name
+ self._reverse_dict = None
+ self._namespace_dict = None
+ self._app_dict = None
+
+ def __repr__(self):
+ return '<%s %s (%s:%s) %s>' % (self.__class__.__name__, self.urlconf_name, self.app_name, self.namespace, self.regex.pattern)
+
+ def _populate(self):
+ lookups = MultiValueDict()
+ namespaces = {}
+ apps = {}
+ for pattern in reversed(self.url_patterns):
+ p_pattern = pattern.regex.pattern
+ if p_pattern.startswith('^'):
+ p_pattern = p_pattern[1:]
+ if isinstance(pattern, RegexURLResolver):
+ if pattern.namespace:
+ namespaces[pattern.namespace] = (p_pattern, pattern)
+ if pattern.app_name:
+ apps.setdefault(pattern.app_name, []).append(pattern.namespace)
+ else:
+ parent = normalize(pattern.regex.pattern)
+ for name in pattern.reverse_dict:
+ for matches, pat in pattern.reverse_dict.getlist(name):
+ new_matches = []
+ for piece, p_args in parent:
+ new_matches.extend([(piece + suffix, p_args + args) for (suffix, args) in matches])
+ lookups.appendlist(name, (new_matches, p_pattern + pat))
+ for namespace, (prefix, sub_pattern) in pattern.namespace_dict.items():
+ namespaces[namespace] = (p_pattern + prefix, sub_pattern)
+ for app_name, namespace_list in pattern.app_dict.items():
+ apps.setdefault(app_name, []).extend(namespace_list)
+ else:
+ bits = normalize(p_pattern)
+ lookups.appendlist(pattern.callback, (bits, p_pattern))
+ if pattern.name is not None:
+ lookups.appendlist(pattern.name, (bits, p_pattern))
+ self._reverse_dict = lookups
+ self._namespace_dict = namespaces
+ self._app_dict = apps
+
+ def _get_reverse_dict(self):
+ if self._reverse_dict is None:
+ self._populate()
+ return self._reverse_dict
+ reverse_dict = property(_get_reverse_dict)
+
+ def _get_namespace_dict(self):
+ if self._namespace_dict is None:
+ self._populate()
+ return self._namespace_dict
+ namespace_dict = property(_get_namespace_dict)
+
+ def _get_app_dict(self):
+ if self._app_dict is None:
+ self._populate()
+ return self._app_dict
+ app_dict = property(_get_app_dict)
+
+ def resolve(self, path):
+ tried = []
+ match = self.regex.search(path)
+ if match:
+ new_path = path[match.end():]
+ for pattern in self.url_patterns:
+ try:
+ sub_match = pattern.resolve(new_path)
+ except Resolver404, e:
+ sub_tried = e.args[0].get('tried')
+ if sub_tried is not None:
+ tried.extend([(pattern.regex.pattern + ' ' + t) for t in sub_tried])
+ else:
+ tried.append(pattern.regex.pattern)
+ else:
+ if sub_match:
+ sub_match_dict = dict([(smart_str(k), v) for k, v in match.groupdict().items()])
+ sub_match_dict.update(self.default_kwargs)
+ for k, v in sub_match[2].iteritems():
+ sub_match_dict[smart_str(k)] = v
+ return sub_match[0], sub_match[1], sub_match_dict
+ tried.append(pattern.regex.pattern)
+ raise Resolver404({'tried': tried, 'path': new_path})
+ raise Resolver404({'path' : path})
+
+ def _get_urlconf_module(self):
+ try:
+ return self._urlconf_module
+ except AttributeError:
+ self._urlconf_module = import_module(self.urlconf_name)
+ return self._urlconf_module
+ urlconf_module = property(_get_urlconf_module)
+
+ def _get_url_patterns(self):
+ patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
+ try:
+ iter(patterns)
+ except TypeError:
+ raise ImproperlyConfigured("The included urlconf %s doesn't have any patterns in it" % self.urlconf_name)
+ return patterns
+ url_patterns = property(_get_url_patterns)
+
+ def _resolve_special(self, view_type):
+ callback = getattr(self.urlconf_module, 'handler%s' % view_type)
+ try:
+ return get_callable(callback), {}
+ except (ImportError, AttributeError), e:
+ raise ViewDoesNotExist("Tried %s. Error was: %s" % (callback, str(e)))
+
+ def resolve404(self):
+ return self._resolve_special('404')
+
+ def resolve500(self):
+ return self._resolve_special('500')
+
+ def reverse(self, lookup_view, *args, **kwargs):
+ if args and kwargs:
+ raise ValueError("Don't mix *args and **kwargs in call to reverse()!")
+ try:
+ lookup_view = get_callable(lookup_view, True)
+ except (ImportError, AttributeError), e:
+ raise NoReverseMatch("Error importing '%s': %s." % (lookup_view, e))
+ possibilities = self.reverse_dict.getlist(lookup_view)
+ for possibility, pattern in possibilities:
+ for result, params in possibility:
+ if args:
+ if len(args) != len(params):
+ continue
+ unicode_args = [force_unicode(val) for val in args]
+ candidate = result % dict(zip(params, unicode_args))
+ else:
+ if set(kwargs.keys()) != set(params):
+ continue
+ unicode_kwargs = dict([(k, force_unicode(v)) for (k, v) in kwargs.items()])
+ candidate = result % unicode_kwargs
+ if re.search(u'^%s' % pattern, candidate, re.UNICODE):
+ return candidate
+        # lookup_view can be URL label, or dotted path, or callable. Any of
+ # these can be passed in at the top, but callables are not friendly in
+ # error messages.
+ m = getattr(lookup_view, '__module__', None)
+ n = getattr(lookup_view, '__name__', None)
+ if m is not None and n is not None:
+ lookup_view_s = "%s.%s" % (m, n)
+ else:
+ lookup_view_s = lookup_view
+ raise NoReverseMatch("Reverse for '%s' with arguments '%s' and keyword "
+ "arguments '%s' not found." % (lookup_view_s, args, kwargs))
+
+def resolve(path, urlconf=None):
+ if urlconf is None:
+ urlconf = get_urlconf()
+ return get_resolver(urlconf).resolve(path)
+
+def reverse(viewname, urlconf=None, args=None, kwargs=None, prefix=None, current_app=None):
+ if urlconf is None:
+ urlconf = get_urlconf()
+ resolver = get_resolver(urlconf)
+ args = args or []
+ kwargs = kwargs or {}
+
+ if prefix is None:
+ prefix = get_script_prefix()
+
+ if not isinstance(viewname, basestring):
+ view = viewname
+ else:
+ parts = viewname.split(':')
+ parts.reverse()
+ view = parts[0]
+ path = parts[1:]
+
+ resolved_path = []
+ while path:
+ ns = path.pop()
+
+ # Lookup the name to see if it could be an app identifier
+ try:
+ app_list = resolver.app_dict[ns]
+ # Yes! Path part matches an app in the current Resolver
+ if current_app and current_app in app_list:
+ # If we are reversing for a particular app, use that namespace
+ ns = current_app
+ elif ns not in app_list:
+ # The name isn't shared by one of the instances (i.e., the default)
+ # so just pick the first instance as the default.
+ ns = app_list[0]
+ except KeyError:
+ pass
+
+ try:
+ extra, resolver = resolver.namespace_dict[ns]
+ resolved_path.append(ns)
+ prefix = prefix + extra
+ except KeyError, key:
+ if resolved_path:
+ raise NoReverseMatch("%s is not a registered namespace inside '%s'" % (key, ':'.join(resolved_path)))
+ else:
+ raise NoReverseMatch("%s is not a registered namespace" % key)
+
+ return iri_to_uri(u'%s%s' % (prefix, resolver.reverse(view,
+ *args, **kwargs)))
+
+def clear_url_caches():
+ global _resolver_cache
+ global _callable_cache
+ _resolver_cache.clear()
+ _callable_cache.clear()
+
+def set_script_prefix(prefix):
+ """
+ Sets the script prefix for the current thread.
+ """
+ if not prefix.endswith('/'):
+ prefix += '/'
+ _prefixes[currentThread()] = prefix
+
+def get_script_prefix():
+ """
+ Returns the currently active script prefix. Useful for client code that
+ wishes to construct their own URLs manually (although accessing the request
+ instance is normally going to be a lot cleaner).
+ """
+ return _prefixes.get(currentThread(), u'/')
+
+def set_urlconf(urlconf_name):
+ """
+ Sets the URLconf for the current thread (overriding the default one in
+ settings). Set to None to revert back to the default.
+ """
+ thread = currentThread()
+ if urlconf_name:
+ _urlconfs[thread] = urlconf_name
+ else:
+ # faster than wrapping in a try/except
+ if thread in _urlconfs:
+ del _urlconfs[thread]
+
+def get_urlconf(default=None):
+ """
+ Returns the root URLconf to use for the current thread if it has been
+ changed from the default one.
+ """
+ thread = currentThread()
+ if thread in _urlconfs:
+ return _urlconfs[thread]
+ return default
diff --git a/google/appengine/_internal/django/core/validators.py b/google/appengine/_internal/django/core/validators.py
new file mode 100644
index 0000000..b4f3ff9
--- /dev/null
+++ b/google/appengine/_internal/django/core/validators.py
@@ -0,0 +1,172 @@
+import re
+import urlparse
+
+from google.appengine._internal.django.core.exceptions import ValidationError
+from google.appengine._internal.django.utils.translation import ugettext_lazy as _
+from google.appengine._internal.django.utils.encoding import smart_unicode
+
+# These values, if given to validate(), will trigger the self.required check.
+EMPTY_VALUES = (None, '', [], (), {})
+
+try:
+ from google.appengine._internal.django.conf import settings
+ URL_VALIDATOR_USER_AGENT = settings.URL_VALIDATOR_USER_AGENT
+except ImportError:
+ # It's OK if Django settings aren't configured.
+ URL_VALIDATOR_USER_AGENT = 'Django (http://www.djangoproject.com/)'
+
+class RegexValidator(object):
+ regex = ''
+ message = _(u'Enter a valid value.')
+ code = 'invalid'
+
+ def __init__(self, regex=None, message=None, code=None):
+ if regex is not None:
+ self.regex = regex
+ if message is not None:
+ self.message = message
+ if code is not None:
+ self.code = code
+
+ if isinstance(self.regex, basestring):
+ self.regex = re.compile(regex)
+
+ def __call__(self, value):
+ """
+ Validates that the input matches the regular expression.
+ """
+ if not self.regex.search(smart_unicode(value)):
+ raise ValidationError(self.message, code=self.code)
+
+class URLValidator(RegexValidator):
+ regex = re.compile(
+ r'^https?://' # http:// or https://
+ r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|' #domain...
+ r'localhost|' #localhost...
+ r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
+ r'(?::\d+)?' # optional port
+ r'(?:/?|[/?]\S+)$', re.IGNORECASE)
+
+ def __init__(self, verify_exists=False, validator_user_agent=URL_VALIDATOR_USER_AGENT):
+ super(URLValidator, self).__init__()
+ self.verify_exists = verify_exists
+ self.user_agent = validator_user_agent
+
+ def __call__(self, value):
+ try:
+ super(URLValidator, self).__call__(value)
+ except ValidationError, e:
+ # Trivial case failed. Try for possible IDN domain
+ if value:
+ value = smart_unicode(value)
+ scheme, netloc, path, query, fragment = urlparse.urlsplit(value)
+ try:
+ netloc = netloc.encode('idna') # IDN -> ACE
+ except UnicodeError: # invalid domain part
+ raise e
+ url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))
+ super(URLValidator, self).__call__(url)
+ else:
+ raise
+ else:
+ url = value
+
+ if self.verify_exists:
+ import urllib2
+ headers = {
+ "Accept": "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5",
+ "Accept-Language": "en-us,en;q=0.5",
+ "Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.7",
+ "Connection": "close",
+ "User-Agent": self.user_agent,
+ }
+ try:
+ req = urllib2.Request(url, None, headers)
+ u = urllib2.urlopen(req)
+ except ValueError:
+ raise ValidationError(_(u'Enter a valid URL.'), code='invalid')
+ except: # urllib2.URLError, httplib.InvalidURL, etc.
+ raise ValidationError(_(u'This URL appears to be a broken link.'), code='invalid_link')
+
+
+def validate_integer(value):
+ try:
+ int(value)
+ except (ValueError, TypeError), e:
+ raise ValidationError('')
+
+class EmailValidator(RegexValidator):
+
+ def __call__(self, value):
+ try:
+ super(EmailValidator, self).__call__(value)
+ except ValidationError, e:
+ # Trivial case failed. Try for possible IDN domain-part
+ if value and u'@' in value:
+ parts = value.split(u'@')
+ domain_part = parts[-1]
+ try:
+ parts[-1] = parts[-1].encode('idna')
+ except UnicodeError:
+ raise e
+ super(EmailValidator, self).__call__(u'@'.join(parts))
+ else:
+ raise
+
+email_re = re.compile(
+ r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom
+ r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"' # quoted-string
+ r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE) # domain
+validate_email = EmailValidator(email_re, _(u'Enter a valid e-mail address.'), 'invalid')
+
+slug_re = re.compile(r'^[-\w]+$')
+validate_slug = RegexValidator(slug_re, _(u"Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."), 'invalid')
+
+ipv4_re = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}$')
+validate_ipv4_address = RegexValidator(ipv4_re, _(u'Enter a valid IPv4 address.'), 'invalid')
+
+comma_separated_int_list_re = re.compile('^[\d,]+$')
+validate_comma_separated_integer_list = RegexValidator(comma_separated_int_list_re, _(u'Enter only digits separated by commas.'), 'invalid')
+
+
+class BaseValidator(object):
+ compare = lambda self, a, b: a is not b
+ clean = lambda self, x: x
+ message = _(u'Ensure this value is %(limit_value)s (it is %(show_value)s).')
+ code = 'limit_value'
+
+ def __init__(self, limit_value):
+ self.limit_value = limit_value
+
+ def __call__(self, value):
+ cleaned = self.clean(value)
+ params = {'limit_value': self.limit_value, 'show_value': cleaned}
+ if self.compare(cleaned, self.limit_value):
+ raise ValidationError(
+ self.message % params,
+ code=self.code,
+ params=params,
+ )
+
+class MaxValueValidator(BaseValidator):
+ compare = lambda self, a, b: a > b
+ message = _(u'Ensure this value is less than or equal to %(limit_value)s.')
+ code = 'max_value'
+
+class MinValueValidator(BaseValidator):
+ compare = lambda self, a, b: a < b
+ message = _(u'Ensure this value is greater than or equal to %(limit_value)s.')
+ code = 'min_value'
+
+class MinLengthValidator(BaseValidator):
+ compare = lambda self, a, b: a < b
+ clean = lambda self, x: len(x)
+ message = _(u'Ensure this value has at least %(limit_value)d characters (it has %(show_value)d).')
+ code = 'min_length'
+
+class MaxLengthValidator(BaseValidator):
+ compare = lambda self, a, b: a > b
+ clean = lambda self, x: len(x)
+ message = _(u'Ensure this value has at most %(limit_value)d characters (it has %(show_value)d).')
+ code = 'max_length'
+
diff --git a/google/appengine/_internal/django/core/xheaders.py b/google/appengine/_internal/django/core/xheaders.py
new file mode 100644
index 0000000..16cc6c3
--- /dev/null
+++ b/google/appengine/_internal/django/core/xheaders.py
@@ -0,0 +1,24 @@
+"""
+Pages in Django are served up with custom HTTP headers containing useful
+information about those pages -- namely, the content type and object ID.
+
+This module contains utility functions for retrieving and doing interesting
+things with these special "X-Headers" (so called because the HTTP spec demands
+that custom headers are prefixed with "X-").
+
+Next time you're at slashdot.org, watch out for X-Fry and X-Bender. :)
+"""
+
+def populate_xheaders(request, response, model, object_id):
+ """
+ Adds the "X-Object-Type" and "X-Object-Id" headers to the given
+ HttpResponse according to the given model and object_id -- but only if the
+ given HttpRequest object has an IP address within the INTERNAL_IPS setting
+ or if the request is from a logged in staff member.
+ """
+ from google.appengine._internal.django.conf import settings
+ if (request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS
+ or (hasattr(request, 'user') and request.user.is_active
+ and request.user.is_staff)):
+ response['X-Object-Type'] = "%s.%s" % (model._meta.app_label, model._meta.object_name.lower())
+ response['X-Object-Id'] = str(object_id)
diff --git a/google/appengine/_internal/django/template/__init__.py b/google/appengine/_internal/django/template/__init__.py
new file mode 100644
index 0000000..670ede0
--- /dev/null
+++ b/google/appengine/_internal/django/template/__init__.py
@@ -0,0 +1,1050 @@
+"""
+This is the Django template system.
+
+How it works:
+
+The Lexer.tokenize() function converts a template string (i.e., a string containing
+markup with custom template tags) to tokens, which can be either plain text
+(TOKEN_TEXT), variables (TOKEN_VAR) or block statements (TOKEN_BLOCK).
+
+The Parser() class takes a list of tokens in its constructor, and its parse()
+method returns a compiled template -- which is, under the hood, a list of
+Node objects.
+
+Each Node is responsible for creating some sort of output -- e.g. simple text
+(TextNode), variable values in a given context (VariableNode), results of basic
+logic (IfNode), results of looping (ForNode), or anything else. The core Node
+types are TextNode, VariableNode, IfNode and ForNode, but plugin modules can
+define their own custom node types.
+
+Each Node has a render() method, which takes a Context and returns a string of
+the rendered node. For example, the render() method of a Variable Node returns
+the variable's value as a string. The render() method of an IfNode returns the
+rendered output of whatever was inside the loop, recursively.
+
+The Template class is a convenient wrapper that takes care of template
+compilation and rendering.
+
+Usage:
+
+The only thing you should ever use directly in this file is the Template class.
+Create a compiled template object with a template_string, then call render()
+with a context. In the compilation stage, the TemplateSyntaxError exception
+will be raised if the template doesn't have proper syntax.
+
+Sample code:
+
+>>> from google.appengine._internal.django import template
+>>> s = u'<html>{% if test %}<h1>{{ varvalue }}</h1>{% endif %}</html>'
+>>> t = template.Template(s)
+
+(t is now a compiled template, and its render() method can be called multiple
+times with multiple contexts)
+
+>>> c = template.Context({'test':True, 'varvalue': 'Hello'})
+>>> t.render(c)
+u'<html><h1>Hello</h1></html>'
+>>> c = template.Context({'test':False, 'varvalue': 'Hello'})
+>>> t.render(c)
+u'<html></html>'
+"""
+import imp
+import re
+from inspect import getargspec
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.template.context import Context, RequestContext, ContextPopException
+from google.appengine._internal.django.utils.importlib import import_module
+from google.appengine._internal.django.utils.itercompat import is_iterable
+from google.appengine._internal.django.utils.functional import curry, Promise
+from google.appengine._internal.django.utils.text import smart_split, unescape_string_literal, get_text_list
+from google.appengine._internal.django.utils.encoding import smart_unicode, force_unicode, smart_str
+from google.appengine._internal.django.utils.translation import ugettext_lazy
+from google.appengine._internal.django.utils.safestring import SafeData, EscapeData, mark_safe, mark_for_escaping
+from google.appengine._internal.django.utils.formats import localize
+from google.appengine._internal.django.utils.html import escape
+from google.appengine._internal.django.utils.module_loading import module_has_submodule
+
+__all__ = ('Template', 'Context', 'RequestContext', 'compile_string')
+
+TOKEN_TEXT = 0
+TOKEN_VAR = 1
+TOKEN_BLOCK = 2
+TOKEN_COMMENT = 3
+
+# template syntax constants
+FILTER_SEPARATOR = '|'
+FILTER_ARGUMENT_SEPARATOR = ':'
+VARIABLE_ATTRIBUTE_SEPARATOR = '.'
+BLOCK_TAG_START = '{%'
+BLOCK_TAG_END = '%}'
+VARIABLE_TAG_START = '{{'
+VARIABLE_TAG_END = '}}'
+COMMENT_TAG_START = '{#'
+COMMENT_TAG_END = '#}'
+SINGLE_BRACE_START = '{'
+SINGLE_BRACE_END = '}'
+
+ALLOWED_VARIABLE_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.'
+
+# what to report as the origin for templates that come from non-loader sources
+# (e.g. strings)
+UNKNOWN_SOURCE = '<unknown source>'
+
+# match a variable or block tag and capture the entire tag, including start/end delimiters
+tag_re = re.compile('(%s.*?%s|%s.*?%s|%s.*?%s)' % (re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END),
+ re.escape(VARIABLE_TAG_START), re.escape(VARIABLE_TAG_END),
+ re.escape(COMMENT_TAG_START), re.escape(COMMENT_TAG_END)))
+
+# global dictionary of libraries that have been loaded using get_library
+libraries = {}
+# global list of libraries to load by default for a new parser
+builtins = []
+
+# True if TEMPLATE_STRING_IF_INVALID contains a format string (%s). None means
+# uninitialised.
+invalid_var_format_string = None
+
+class TemplateSyntaxError(Exception):
+ pass
+
+class TemplateDoesNotExist(Exception):
+ pass
+
+class TemplateEncodingError(Exception):
+ pass
+
+class VariableDoesNotExist(Exception):
+
+ def __init__(self, msg, params=()):
+ self.msg = msg
+ self.params = params
+
+ def __str__(self):
+ return unicode(self).encode('utf-8')
+
+ def __unicode__(self):
+ return self.msg % tuple([force_unicode(p, errors='replace') for p in self.params])
+
+class InvalidTemplateLibrary(Exception):
+ pass
+
+class Origin(object):
+ def __init__(self, name):
+ self.name = name
+
+ def reload(self):
+ raise NotImplementedError
+
+ def __str__(self):
+ return self.name
+
+class StringOrigin(Origin):
+ def __init__(self, source):
+ super(StringOrigin, self).__init__(UNKNOWN_SOURCE)
+ self.source = source
+
+ def reload(self):
+ return self.source
+
+class Template(object):
+ def __init__(self, template_string, origin=None, name='<Unknown Template>'):
+ try:
+ template_string = smart_unicode(template_string)
+ except UnicodeDecodeError:
+ raise TemplateEncodingError("Templates can only be constructed from unicode or UTF-8 strings.")
+ if settings.TEMPLATE_DEBUG and origin is None:
+ origin = StringOrigin(template_string)
+ self.nodelist = compile_string(template_string, origin)
+ self.name = name
+
+ def __iter__(self):
+ for node in self.nodelist:
+ for subnode in node:
+ yield subnode
+
+ def _render(self, context):
+ return self.nodelist.render(context)
+
+ def render(self, context):
+ "Display stage -- can be called many times"
+ context.render_context.push()
+ try:
+ return self._render(context)
+ finally:
+ context.render_context.pop()
+
+def compile_string(template_string, origin):
+ "Compiles template_string into NodeList ready for rendering"
+ if settings.TEMPLATE_DEBUG:
+ from debug import DebugLexer, DebugParser
+ lexer_class, parser_class = DebugLexer, DebugParser
+ else:
+ lexer_class, parser_class = Lexer, Parser
+ lexer = lexer_class(template_string, origin)
+ parser = parser_class(lexer.tokenize())
+ return parser.parse()
+
+class Token(object):
+ def __init__(self, token_type, contents):
+ # token_type must be TOKEN_TEXT, TOKEN_VAR, TOKEN_BLOCK or TOKEN_COMMENT.
+ self.token_type, self.contents = token_type, contents
+
+ def __str__(self):
+ return '<%s token: "%s...">' % ({TOKEN_TEXT: 'Text', TOKEN_VAR: 'Var', TOKEN_BLOCK: 'Block', TOKEN_COMMENT: 'Comment'}[self.token_type],
+ self.contents[:20].replace('\n', ''))
+
+ def split_contents(self):
+ split = []
+ bits = iter(smart_split(self.contents))
+ for bit in bits:
+ # Handle translation-marked template pieces
+ if bit.startswith('_("') or bit.startswith("_('"):
+ sentinal = bit[2] + ')'
+ trans_bit = [bit]
+ while not bit.endswith(sentinal):
+ bit = bits.next()
+ trans_bit.append(bit)
+ bit = ' '.join(trans_bit)
+ split.append(bit)
+ return split
+
+class Lexer(object):
+ def __init__(self, template_string, origin):
+ self.template_string = template_string
+ self.origin = origin
+
+ def tokenize(self):
+ "Return a list of tokens from a given template_string."
+ in_tag = False
+ result = []
+ for bit in tag_re.split(self.template_string):
+ if bit:
+ result.append(self.create_token(bit, in_tag))
+ in_tag = not in_tag
+ return result
+
+ def create_token(self, token_string, in_tag):
+ """
+ Convert the given token string into a new Token object and return it.
+ If in_tag is True, we are processing something that matched a tag,
+ otherwise it should be treated as a literal string.
+ """
+ if in_tag:
+ if token_string.startswith(VARIABLE_TAG_START):
+ token = Token(TOKEN_VAR, token_string[len(VARIABLE_TAG_START):-len(VARIABLE_TAG_END)].strip())
+ elif token_string.startswith(BLOCK_TAG_START):
+ token = Token(TOKEN_BLOCK, token_string[len(BLOCK_TAG_START):-len(BLOCK_TAG_END)].strip())
+ elif token_string.startswith(COMMENT_TAG_START):
+ token = Token(TOKEN_COMMENT, '')
+ else:
+ token = Token(TOKEN_TEXT, token_string)
+ return token
+
+class Parser(object):
+ def __init__(self, tokens):
+ self.tokens = tokens
+ self.tags = {}
+ self.filters = {}
+ for lib in builtins:
+ self.add_library(lib)
+
+ def parse(self, parse_until=None):
+ if parse_until is None: parse_until = []
+ nodelist = self.create_nodelist()
+ while self.tokens:
+ token = self.next_token()
+ if token.token_type == TOKEN_TEXT:
+ self.extend_nodelist(nodelist, TextNode(token.contents), token)
+ elif token.token_type == TOKEN_VAR:
+ if not token.contents:
+ self.empty_variable(token)
+ filter_expression = self.compile_filter(token.contents)
+ var_node = self.create_variable_node(filter_expression)
+ self.extend_nodelist(nodelist, var_node,token)
+ elif token.token_type == TOKEN_BLOCK:
+ if token.contents in parse_until:
+ # put token back on token list so calling code knows why it terminated
+ self.prepend_token(token)
+ return nodelist
+ try:
+ command = token.contents.split()[0]
+ except IndexError:
+ self.empty_block_tag(token)
+ # execute callback function for this tag and append resulting node
+ self.enter_command(command, token)
+ try:
+ compile_func = self.tags[command]
+ except KeyError:
+ self.invalid_block_tag(token, command, parse_until)
+ try:
+ compiled_result = compile_func(self, token)
+ except TemplateSyntaxError, e:
+ if not self.compile_function_error(token, e):
+ raise
+ self.extend_nodelist(nodelist, compiled_result, token)
+ self.exit_command()
+ if parse_until:
+ self.unclosed_block_tag(parse_until)
+ return nodelist
+
+ def skip_past(self, endtag):
+ while self.tokens:
+ token = self.next_token()
+ if token.token_type == TOKEN_BLOCK and token.contents == endtag:
+ return
+ self.unclosed_block_tag([endtag])
+
+ def create_variable_node(self, filter_expression):
+ return VariableNode(filter_expression)
+
+ def create_nodelist(self):
+ return NodeList()
+
+ def extend_nodelist(self, nodelist, node, token):
+ if node.must_be_first and nodelist:
+ try:
+ if nodelist.contains_nontext:
+ raise AttributeError
+ except AttributeError:
+ raise TemplateSyntaxError("%r must be the first tag in the template." % node)
+ if isinstance(nodelist, NodeList) and not isinstance(node, TextNode):
+ nodelist.contains_nontext = True
+ nodelist.append(node)
+
+ def enter_command(self, command, token):
+ pass
+
+ def exit_command(self):
+ pass
+
+ def error(self, token, msg):
+ return TemplateSyntaxError(msg)
+
+ def empty_variable(self, token):
+ raise self.error(token, "Empty variable tag")
+
def empty_block_tag(self, token):
    # Called when a block token has no contents (no tag name at all).
    raise self.error(token, "Empty block tag")
+
+ def invalid_block_tag(self, token, command, parse_until=None):
+ if parse_until:
+ raise self.error(token, "Invalid block tag: '%s', expected %s" % (command, get_text_list(["'%s'" % p for p in parse_until])))
+ raise self.error(token, "Invalid block tag: '%s'" % command)
+
def unclosed_block_tag(self, parse_until):
    # Reached end of token stream while still expecting one of `parse_until`.
    # DebugParser overrides this with source-aware reporting.
    raise self.error(None, "Unclosed tags: %s " % ', '.join(parse_until))
+
def compile_function_error(self, token, e):
    # Hook invoked when a tag's compile function raises TemplateSyntaxError;
    # a falsy return makes the caller re-raise. DebugParser overrides this
    # to attach source info to `e`.
    pass
+
def next_token(self):
    # Pop and return the next token from the front of the stream.
    return self.tokens.pop(0)
+
def prepend_token(self, token):
    # Push a token back onto the front of the stream (one-token lookahead).
    self.tokens.insert(0, token)
+
def delete_first_token(self):
    # Discard the next token without inspecting it.
    del self.tokens[0]
+
def add_library(self, lib):
    """Merge a Library's registered tags and filters into this parser."""
    self.tags.update(lib.tags)
    self.filters.update(lib.filters)
+
def compile_filter(self, token):
    """Convenient wrapper for FilterExpression: parse `token` (a variable
    plus optional |filter segments) against this parser's filter registry."""
    return FilterExpression(token, self)
+
def find_filter(self, filter_name):
    """Return the registered filter callable for `filter_name`, raising
    TemplateSyntaxError for unknown names."""
    try:
        return self.filters[filter_name]
    except KeyError:
        raise TemplateSyntaxError("Invalid filter: '%s'" % filter_name)
+
class TokenParser(object):
    """
    Subclass this and implement the top() method to parse a template line. When
    instantiating the parser, pass in the line from the Django template parser.

    The parser's "tagname" instance-variable stores the name of the tag that
    the filter was called with.
    """
    def __init__(self, subject):
        self.subject = subject
        # Index of the next unread character in `subject`.
        self.pointer = 0
        # Stack of previous pointer positions; back() pops one entry to undo
        # the most recent microparser call.
        self.backout = []
        self.tagname = self.tag()

    def top(self):
        "Overload this method to do the actual parsing and return the result."
        raise NotImplementedError()

    def more(self):
        "Returns True if there is more stuff in the tag."
        return self.pointer < len(self.subject)

    def back(self):
        "Undoes the last microparser. Use this for lookahead and backtracking."
        if not len(self.backout):
            raise TemplateSyntaxError("back called without some previous parsing")
        self.pointer = self.backout.pop()

    def tag(self):
        "A microparser that just returns the next tag from the line."
        subject = self.subject
        i = self.pointer
        if i >= len(subject):
            raise TemplateSyntaxError("expected another tag, found end of string: %s" % subject)
        p = i
        # Consume up to the next whitespace character...
        while i < len(subject) and subject[i] not in (' ', '\t'):
            i += 1
        s = subject[p:i]
        # ...then skip over any trailing whitespace.
        while i < len(subject) and subject[i] in (' ', '\t'):
            i += 1
        self.backout.append(self.pointer)
        self.pointer = i
        return s

    def value(self):
        "A microparser that parses for a value: some string constant or variable name."
        subject = self.subject
        i = self.pointer

        def next_space_index(subject, i):
            "Increment pointer until a real space (i.e. a space not within quotes) is encountered"
            while i < len(subject) and subject[i] not in (' ', '\t'):
                if subject[i] in ('"', "'"):
                    c = subject[i]
                    i += 1
                    # Skip to the matching close quote.
                    while i < len(subject) and subject[i] != c:
                        i += 1
                    if i >= len(subject):
                        raise TemplateSyntaxError("Searching for value. Unexpected end of string in column %d: %s" % (i, subject))
                i += 1
            return i

        if i >= len(subject):
            raise TemplateSyntaxError("Searching for value. Expected another value but found end of string: %s" % subject)
        if subject[i] in ('"', "'"):
            # Quoted value: scan to the matching close quote first.
            p = i
            i += 1
            while i < len(subject) and subject[i] != subject[p]:
                i += 1
            if i >= len(subject):
                raise TemplateSyntaxError("Searching for value. Unexpected end of string in column %d: %s" % (i, subject))
            i += 1

            # Continue parsing until next "real" space, so that filters are also included
            i = next_space_index(subject, i)

            res = subject[p:i]
            # Consume trailing whitespace so the pointer rests on the next value.
            while i < len(subject) and subject[i] in (' ', '\t'):
                i += 1
            self.backout.append(self.pointer)
            self.pointer = i
            return res
        else:
            # Unquoted value: everything up to the next unquoted space.
            p = i
            i = next_space_index(subject, i)
            s = subject[p:i]
            while i < len(subject) and subject[i] in (' ', '\t'):
                i += 1
            self.backout.append(self.pointer)
            self.pointer = i
            return s
+
# This only matches constant *strings* (things in quotes or marked for
# translation). Numbers are treated as variables for implementation reasons
# (so that they retain their type when passed to filters).
constant_string = r"""
(?:%(i18n_open)s%(strdq)s%(i18n_close)s|
%(i18n_open)s%(strsq)s%(i18n_close)s|
%(strdq)s|
%(strsq)s)
""" % {
    'strdq': r'"[^"\\]*(?:\\.[^"\\]*)*"', # double-quoted string
    'strsq': r"'[^'\\]*(?:\\.[^'\\]*)*'", # single-quoted string
    'i18n_open' : re.escape("_("),        # translation wrapper: _( ... )
    'i18n_close' : re.escape(")"),
    }
# Collapse the multi-line pattern to a single line before it is embedded
# into other regex patterns (filter_raw_string below).
constant_string = constant_string.replace("\n", "")
+
# Matches either the leading variable/constant of a filter expression, or one
# |filter_name:arg segment. FilterExpression applies it repeatedly with
# finditer() and checks that the matches tile the whole token.
filter_raw_string = r"""
^(?P<constant>%(constant)s)|
^(?P<var>[%(var_chars)s]+|%(num)s)|
 (?:%(filter_sep)s
     (?P<filter_name>\w+)
         (?:%(arg_sep)s
             (?:
              (?P<constant_arg>%(constant)s)|
              (?P<var_arg>[%(var_chars)s]+|%(num)s)
             )
         )?
 )""" % {
    'constant': constant_string,
    'num': r'[-+\.]?\d[\d\.e]*',
    'var_chars': "\w\." ,
    'filter_sep': re.escape(FILTER_SEPARATOR),
    'arg_sep': re.escape(FILTER_ARGUMENT_SEPARATOR),
  }

filter_re = re.compile(filter_raw_string, re.UNICODE|re.VERBOSE)
+
class FilterExpression(object):
    r"""
    Parses a variable token and its optional filters (all as a single string),
    and return a list of tuples of the filter name and arguments.
    Sample:
        >>> token = 'variable|default:"Default value"|date:"Y-m-d"'
        >>> p = Parser('')
        >>> fe = FilterExpression(token, p)
        >>> len(fe.filters)
        2
        >>> fe.var
        <Variable: 'variable'>

    This class should never be instantiated outside of the
    get_filters_from_token helper function.
    """
    def __init__(self, token, parser):
        self.token = token
        matches = filter_re.finditer(token)
        var_obj = None
        filters = []
        upto = 0
        for match in matches:
            start = match.start()
            # Matches must tile the token; any gap is unparsable input.
            if upto != start:
                raise TemplateSyntaxError("Could not parse some characters: %s|%s|%s" % (token[:upto], token[upto:start], token[start:]))
            if var_obj is None:
                # The first match is the base variable or constant.
                var, constant = match.group("var", "constant")
                if constant:
                    # Constants can be resolved immediately, with no context.
                    try:
                        var_obj = Variable(constant).resolve({})
                    except VariableDoesNotExist:
                        var_obj = None
                elif var is None:
                    raise TemplateSyntaxError("Could not find variable at start of %s." % token)
                else:
                    var_obj = Variable(var)
            else:
                # Subsequent matches are |filter_name:arg segments.
                filter_name = match.group("filter_name")
                args = []
                constant_arg, var_arg = match.group("constant_arg", "var_arg")
                # Each argument is stored as (needs_context_lookup, value).
                if constant_arg:
                    args.append((False, Variable(constant_arg).resolve({})))
                elif var_arg:
                    args.append((True, Variable(var_arg)))
                filter_func = parser.find_filter(filter_name)
                self.args_check(filter_name, filter_func, args)
                filters.append((filter_func, args))
            upto = match.end()
        if upto != len(token):
            raise TemplateSyntaxError("Could not parse the remainder: '%s' from '%s'" % (token[upto:], token))

        self.filters = filters
        self.var = var_obj

    def resolve(self, context, ignore_failures=False):
        # Resolve the base variable, then pipe the result through each filter.
        if isinstance(self.var, Variable):
            try:
                obj = self.var.resolve(context)
            except VariableDoesNotExist:
                if ignore_failures:
                    obj = None
                else:
                    if settings.TEMPLATE_STRING_IF_INVALID:
                        global invalid_var_format_string
                        # Cache whether the setting contains a '%s' placeholder.
                        if invalid_var_format_string is None:
                            invalid_var_format_string = '%s' in settings.TEMPLATE_STRING_IF_INVALID
                        if invalid_var_format_string:
                            return settings.TEMPLATE_STRING_IF_INVALID % self.var
                        return settings.TEMPLATE_STRING_IF_INVALID
                    else:
                        obj = settings.TEMPLATE_STRING_IF_INVALID
        else:
            # self.var is an already-resolved constant.
            obj = self.var
        for func, args in self.filters:
            arg_vals = []
            for lookup, arg in args:
                if not lookup:
                    # Constant argument: already resolved, just mark safe.
                    arg_vals.append(mark_safe(arg))
                else:
                    arg_vals.append(arg.resolve(context))
            if getattr(func, 'needs_autoescape', False):
                new_obj = func(obj, autoescape=context.autoescape, *arg_vals)
            else:
                new_obj = func(obj, *arg_vals)
            # Propagate safe/escape markers through filters declared is_safe.
            if getattr(func, 'is_safe', False) and isinstance(obj, SafeData):
                obj = mark_safe(new_obj)
            elif isinstance(obj, EscapeData):
                obj = mark_for_escaping(new_obj)
            else:
                obj = new_obj
        return obj

    def args_check(name, func, provided):
        # NOTE: defined without `self` and wrapped as a staticmethod below.
        # Verifies that `provided` matches `func`'s signature.
        provided = list(provided)
        plen = len(provided)
        # Check to see if a decorator is providing the real function.
        func = getattr(func, '_decorated_function', func)
        args, varargs, varkw, defaults = getargspec(func)
        # First argument is filter input.
        args.pop(0)
        if defaults:
            nondefs = args[:-len(defaults)]
        else:
            nondefs = args
        # Args without defaults must be provided.
        try:
            for arg in nondefs:
                provided.pop(0)
        except IndexError:
            # Not enough
            raise TemplateSyntaxError("%s requires %d arguments, %d provided" % (name, len(nondefs), plen))

        # Defaults can be overridden.
        defaults = defaults and list(defaults) or []
        try:
            for parg in provided:
                defaults.pop(0)
        except IndexError:
            # Too many.
            raise TemplateSyntaxError("%s requires %d arguments, %d provided" % (name, len(nondefs), plen))

        return True
    args_check = staticmethod(args_check)

    def __str__(self):
        return self.token
+
def resolve_variable(path, context):
    """
    Returns the resolved variable, which may contain attribute syntax, within
    the given context.

    Deprecated; use the Variable class instead.
    """
    variable = Variable(path)
    return variable.resolve(context)
+
class Variable(object):
    r"""
    A template variable, resolvable against a given context. The variable may be
    a hard-coded string (if it begins and ends with single or double quote
    marks)::

        >>> c = {'article': {'section':u'News'}}
        >>> Variable('article.section').resolve(c)
        u'News'
        >>> Variable('article').resolve(c)
        {'section': u'News'}
        >>> class AClass: pass
        >>> c = AClass()
        >>> c.article = AClass()
        >>> c.article.section = u'News'

    (The example assumes VARIABLE_ATTRIBUTE_SEPARATOR is '.')
    """

    def __init__(self, var):
        self.var = var
        self.literal = None      # set for numeric or quoted-string constants
        self.lookups = None      # set for real variables: tuple of dotted bits
        self.translate = False   # True when wrapped as _("...")

        try:
            # First try to treat this variable as a number.
            #
            # Note that this could cause an OverflowError here that we're not
            # catching. Since this should only happen at compile time, that's
            # probably OK.
            self.literal = float(var)

            # So it's a float... is it an int? If the original value contained a
            # dot or an "e" then it was a float, not an int.
            if '.' not in var and 'e' not in var.lower():
                self.literal = int(self.literal)

            # "2." is invalid
            if var.endswith('.'):
                raise ValueError

        except ValueError:
            # A ValueError means that the variable isn't a number.
            if var.startswith('_(') and var.endswith(')'):
                # The result of the lookup should be translated at rendering
                # time.
                self.translate = True
                var = var[2:-1]
            # If it's wrapped with quotes (single or double), then
            # we're also dealing with a literal.
            try:
                self.literal = mark_safe(unescape_string_literal(var))
            except ValueError:
                # Otherwise we'll set self.lookups so that resolve() knows we're
                # dealing with a bonafide variable
                if var.find(VARIABLE_ATTRIBUTE_SEPARATOR + '_') > -1 or var[0] == '_':
                    raise TemplateSyntaxError("Variables and attributes may not begin with underscores: '%s'" % var)
                self.lookups = tuple(var.split(VARIABLE_ATTRIBUTE_SEPARATOR))

    def resolve(self, context):
        """Resolve this variable against a given context."""
        if self.lookups is not None:
            # We're dealing with a variable that needs to be resolved
            value = self._resolve_lookup(context)
        else:
            # We're dealing with a literal, so it's already been "resolved"
            value = self.literal
        if self.translate:
            return ugettext_lazy(value)
        return value

    def __repr__(self):
        return "<%s: %r>" % (self.__class__.__name__, self.var)

    def __str__(self):
        return self.var

    def _resolve_lookup(self, context):
        """
        Performs resolution of a real variable (i.e. not a literal) against the
        given context.

        As indicated by the method's name, this method is an implementation
        detail and shouldn't be called by external code. Use Variable.resolve()
        instead.
        """
        # Each dotted bit is tried, in order, as: dictionary key, then
        # attribute (calling the result if callable), then integer list index.
        current = context
        for bit in self.lookups:
            try: # dictionary lookup
                current = current[bit]
            except (TypeError, AttributeError, KeyError):
                try: # attribute lookup
                    current = getattr(current, bit)
                    if callable(current):
                        if getattr(current, 'alters_data', False):
                            # Refuse to call data-modifying methods from templates.
                            current = settings.TEMPLATE_STRING_IF_INVALID
                        else:
                            try: # method call (assuming no args required)
                                current = current()
                            except TypeError: # arguments *were* required
                                # GOTCHA: This will also catch any TypeError
                                # raised in the function itself.
                                current = settings.TEMPLATE_STRING_IF_INVALID # invalid method call
                            except Exception, e:
                                if getattr(e, 'silent_variable_failure', False):
                                    current = settings.TEMPLATE_STRING_IF_INVALID
                                else:
                                    raise
                except (TypeError, AttributeError):
                    try: # list-index lookup
                        current = current[int(bit)]
                    except (IndexError, # list index out of range
                            ValueError, # invalid literal for int()
                            KeyError, # current is a dict without `int(bit)` key
                            TypeError, # unsubscriptable object
                            ):
                        raise VariableDoesNotExist("Failed lookup for key [%s] in %r", (bit, current)) # missing attribute
                    except Exception, e:
                        if getattr(e, 'silent_variable_failure', False):
                            current = settings.TEMPLATE_STRING_IF_INVALID
                        else:
                            raise
                except Exception, e:
                    if getattr(e, 'silent_variable_failure', False):
                        current = settings.TEMPLATE_STRING_IF_INVALID
                    else:
                        raise

        return current
+
class Node(object):
    """Base class for all compiled template nodes."""

    # Nodes that must appear first in the template (text nodes may still
    # precede them) set this to True.
    must_be_first = False
    child_nodelists = ('nodelist',)

    def render(self, context):
        "Return the node rendered as a string"
        pass

    def __iter__(self):
        yield self

    def get_nodes_by_type(self, nodetype):
        "Return a list of all nodes (within this node and its nodelist) of the given type"
        found = [self] if isinstance(self, nodetype) else []
        for attr in self.child_nodelists:
            child = getattr(self, attr, None)
            if child:
                found.extend(child.get_nodes_by_type(nodetype))
        return found
+
class NodeList(list):
    """A list of template nodes that can render itself."""

    # Flipped to True by Parser.extend_nodelist() the first time a
    # non-TextNode is added.
    contains_nontext = False

    def render(self, context):
        rendered = []
        for item in self:
            if isinstance(item, Node):
                rendered.append(self.render_node(item, context))
            else:
                rendered.append(item)
        return mark_safe(''.join(force_unicode(piece) for piece in rendered))

    def get_nodes_by_type(self, nodetype):
        "Return a list of all nodes of the given type"
        found = []
        for item in self:
            found.extend(item.get_nodes_by_type(nodetype))
        return found

    def render_node(self, node, context):
        # Hook point: DebugNodeList overrides this to annotate errors.
        return node.render(context)
+
class TextNode(Node):
    """A literal chunk of template text, rendered verbatim."""

    def __init__(self, s):
        self.s = s

    def __repr__(self):
        # Truncate to 25 chars and force ASCII so repr never raises.
        preview = smart_str(self.s[:25], 'ascii', errors='replace')
        return "<Text Node: '%s'>" % preview

    def render(self, context):
        return self.s
+
def _render_value_in_context(value, context):
    """
    Converts any value to a string to become part of a rendered template. This
    means escaping, if required, and conversion to a unicode object. If value
    is a string, it is expected to have already been translated.
    """
    text = force_unicode(localize(value))
    needs_escaping = ((context.autoescape and not isinstance(text, SafeData))
                      or isinstance(text, EscapeData))
    if needs_escaping:
        return escape(text)
    return text
+
class VariableNode(Node):
    """Node that renders a FilterExpression into the output stream."""

    def __init__(self, filter_expression):
        self.filter_expression = filter_expression

    def __repr__(self):
        return "<Variable Node: %s>" % self.filter_expression

    def render(self, context):
        try:
            resolved = self.filter_expression.resolve(context)
        except UnicodeDecodeError:
            # Unicode conversion can fail sometimes for reasons out of our
            # control (e.g. exception rendering). In that case, we fail quietly.
            return ''
        return _render_value_in_context(resolved, context)
+
def generic_tag_compiler(params, defaults, name, node_class, parser, token):
    """Compile a simple tag token into an instance of `node_class`.

    Splits the token into its argument bits (dropping the tag name itself)
    and validates the argument count against the tag function's `params`
    and `defaults` before instantiating `node_class` with the raw bits.

    Raises TemplateSyntaxError when the number of arguments is out of range.
    """
    bits = token.split_contents()[1:]
    bmax = len(params)
    # Arguments backed by a default value are optional.
    def_len = len(defaults) if defaults else 0
    bmin = bmax - def_len
    if not bmin <= len(bits) <= bmax:
        if bmin == bmax:
            message = "%s takes %s arguments" % (name, bmin)
        else:
            message = "%s takes between %s and %s arguments" % (name, bmin, bmax)
        raise TemplateSyntaxError(message)
    return node_class(bits)
+
class Library(object):
    """Registry of template tags and filters.

    Instances collect tag compile functions and filter callables; the
    Parser merges them in via add_library(). tag() and filter() work both
    as plain registration calls and as decorators (with or without an
    explicit name).
    """

    def __init__(self):
        self.filters = {}
        self.tags = {}

    def tag(self, name=None, compile_function=None):
        """Register `compile_function` under `name`; usable as a decorator."""
        # Use identity comparison with None throughout (idiomatic, and not
        # fooled by objects with unusual __eq__ implementations).
        if name is None and compile_function is None:
            # @register.tag()
            return self.tag_function
        elif name is not None and compile_function is None:
            if callable(name):
                # @register.tag
                return self.tag_function(name)
            else:
                # @register.tag('somename') or @register.tag(name='somename')
                def dec(func):
                    return self.tag(name, func)
                return dec
        elif name is not None and compile_function is not None:
            # register.tag('somename', somefunc)
            self.tags[name] = compile_function
            return compile_function
        else:
            # BUGFIX: the format args were previously passed as a second
            # positional argument, so %r was never interpolated.
            raise InvalidTemplateLibrary("Unsupported arguments to Library.tag: (%r, %r)" % (name, compile_function))

    def tag_function(self, func):
        # Register under the wrapped function's name when decorated.
        self.tags[getattr(func, "_decorated_function", func).__name__] = func
        return func

    def filter(self, name=None, filter_func=None):
        """Register `filter_func` under `name`; usable as a decorator."""
        if name is None and filter_func is None:
            # @register.filter()
            return self.filter_function
        elif filter_func is None:
            if callable(name):
                # @register.filter
                return self.filter_function(name)
            else:
                # @register.filter('somename') or @register.filter(name='somename')
                def dec(func):
                    return self.filter(name, func)
                return dec
        elif name is not None and filter_func is not None:
            # register.filter('somename', somefunc)
            self.filters[name] = filter_func
            return filter_func
        else:
            # BUGFIX: interpolate the format args (see Library.tag above).
            raise InvalidTemplateLibrary("Unsupported arguments to Library.filter: (%r, %r)" % (name, filter_func))

    def filter_function(self, func):
        # Register under the wrapped function's name when decorated.
        self.filters[getattr(func, "_decorated_function", func).__name__] = func
        return func

    def simple_tag(self, func):
        """Register `func` as a tag whose arguments are resolved against the
        context and passed straight through; the return value is the output."""
        params, xx, xxx, defaults = getargspec(func)

        class SimpleNode(Node):
            def __init__(self, vars_to_resolve):
                self.vars_to_resolve = map(Variable, vars_to_resolve)

            def render(self, context):
                resolved_vars = [var.resolve(context) for var in self.vars_to_resolve]
                return func(*resolved_vars)

        function_name = getattr(func, "_decorated_function", func).__name__
        compile_func = curry(generic_tag_compiler, params, defaults, function_name, SimpleNode)
        compile_func.__doc__ = func.__doc__
        self.tag(function_name, compile_func)
        return func

    def inclusion_tag(self, file_name, context_class=Context, takes_context=False):
        """Register a tag that renders template `file_name` with the dict
        returned by the decorated function as its context."""
        def dec(func):
            params, xx, xxx, defaults = getargspec(func)
            if takes_context:
                if params[0] == 'context':
                    params = params[1:]
                else:
                    raise TemplateSyntaxError("Any tag function decorated with takes_context=True must have a first argument of 'context'")

            class InclusionNode(Node):
                def __init__(self, vars_to_resolve):
                    self.vars_to_resolve = map(Variable, vars_to_resolve)

                def render(self, context):
                    resolved_vars = [var.resolve(context) for var in self.vars_to_resolve]
                    if takes_context:
                        args = [context] + resolved_vars
                    else:
                        args = resolved_vars

                    # Renamed from `dict` to avoid shadowing the builtin.
                    extra_context = func(*args)

                    if not getattr(self, 'nodelist', False):
                        # Lazily load the included template on first render.
                        from google.appengine._internal.django.template.loader import get_template, select_template
                        if not isinstance(file_name, basestring) and is_iterable(file_name):
                            t = select_template(file_name)
                        else:
                            t = get_template(file_name)
                        self.nodelist = t.nodelist
                    new_context = context_class(extra_context, autoescape=context.autoescape)
                    # Copy across the CSRF token, if present, because inclusion
                    # tags are often used for forms, and we need instructions
                    # for using CSRF protection to be as simple as possible.
                    csrf_token = context.get('csrf_token', None)
                    if csrf_token is not None:
                        new_context['csrf_token'] = csrf_token
                    return self.nodelist.render(new_context)

            function_name = getattr(func, "_decorated_function", func).__name__
            compile_func = curry(generic_tag_compiler, params, defaults, function_name, InclusionNode)
            compile_func.__doc__ = func.__doc__
            self.tag(function_name, compile_func)
            return func
        return dec
+
+def import_library(taglib_module):
+ """Load a template tag library module.
+
+ Verifies that the library contains a 'register' attribute, and
+ returns that attribute as the representation of the library
+ """
+ app_path, taglib = taglib_module.rsplit('.',1)
+ app_module = import_module(app_path)
+ try:
+ mod = import_module(taglib_module)
+ except ImportError, e:
+ # If the ImportError is because the taglib submodule does not exist, that's not
+ # an error that should be raised. If the submodule exists and raised an ImportError
+ # on the attempt to load it, that we want to raise.
+ if not module_has_submodule(app_module, taglib):
+ return None
+ else:
+ raise InvalidTemplateLibrary("ImportError raised loading %s: %s" % (taglib_module, e))
+ try:
+ return mod.register
+ except AttributeError:
+ raise InvalidTemplateLibrary("Template library %s does not have a variable named 'register'" % taglib_module)
+
# Module-level cache filled lazily by get_templatetags_modules().
templatetags_modules = []

def get_templatetags_modules():
    """Return the list of all available template tag modules.

    Caches the result for faster access.
    """
    global templatetags_modules
    if not templatetags_modules:
        _templatetags_modules = []
        # Populate list once per thread.
        # NOTE(review): the module-level cache is assigned only after the
        # local list is fully built, so concurrent callers observe either
        # an empty or a complete list -- never a partial one.
        for app_module in ['google.appengine._internal.django'] + list(settings.INSTALLED_APPS):
            try:
                templatetag_module = '%s.templatetags' % app_module
                import_module(templatetag_module)
                _templatetags_modules.append(templatetag_module)
            except ImportError:
                # Apps without a templatetags package are simply skipped.
                continue
        templatetags_modules = _templatetags_modules
    return templatetags_modules
+
def get_library(library_name):
    """
    Load the template library module with the given name.

    If the library is not already cached in `libraries`, every available
    templatetags module is searched for it; the first match is cached and
    returned. Subsequent loads (e.g. repeated {% load somelib %} in the
    same process) hit the cache. Raises InvalidTemplateLibrary when no
    module provides the library.
    """
    lib = libraries.get(library_name, None)
    if lib:
        return lib
    tried_modules = []
    for module in get_templatetags_modules():
        taglib_module = '%s.%s' % (module, library_name)
        tried_modules.append(taglib_module)
        lib = import_library(taglib_module)
        if lib:
            libraries[library_name] = lib
            return lib
    raise InvalidTemplateLibrary("Template library %s not found, tried %s" % (library_name, ','.join(tried_modules)))
+
def add_to_builtins(module):
    # Import the named tag library and append it to the module-level
    # `builtins` list of always-available libraries.
    builtins.append(import_library(module))

# The default tags and filters are registered as builtins at import time.
add_to_builtins('google.appengine._internal.django.template.defaulttags')
add_to_builtins('google.appengine._internal.django.template.defaultfilters')
diff --git a/google/appengine/_internal/django/template/context.py b/google/appengine/_internal/django/template/context.py
new file mode 100644
index 0000000..a46fc5c
--- /dev/null
+++ b/google/appengine/_internal/django/template/context.py
@@ -0,0 +1,149 @@
+from google.appengine._internal.django.core.exceptions import ImproperlyConfigured
+from google.appengine._internal.django.utils.importlib import import_module
+
# Cache of actual callables; populated lazily by get_standard_processors().
_standard_context_processors = None
# We need the CSRF processor no matter what the user has in their settings,
# because otherwise it is a security vulnerability, and we can't afford to leave
# this to human error or failure to read migration instructions.
_builtin_context_processors = ('google.appengine._internal.django.core.context_processors.csrf',)
+
class ContextPopException(Exception):
    """Raised by BaseContext.pop() when only the bottom dict remains,
    i.e. pop() has been called more times than push()."""
    pass
+
class BaseContext(object):
    """A stack of dicts; lookups search from the most recent scope downward."""

    def __init__(self, dict_=None):
        self.dicts = [dict_ or {}]

    def __repr__(self):
        return repr(self.dicts)

    def __iter__(self):
        # Yield scopes newest-first.
        for layer in reversed(self.dicts):
            yield layer

    def push(self):
        """Push and return a fresh, empty scope."""
        layer = {}
        self.dicts.append(layer)
        return layer

    def pop(self):
        """Remove and return the top scope; the bottom one is never popped."""
        if len(self.dicts) == 1:
            raise ContextPopException
        return self.dicts.pop()

    def __setitem__(self, key, value):
        "Set a variable in the current context"
        self.dicts[-1][key] = value

    def __getitem__(self, key):
        "Get a variable's value, starting at the current context and going upward"
        for layer in reversed(self.dicts):
            if key in layer:
                return layer[key]
        raise KeyError(key)

    def __delitem__(self, key):
        "Delete a variable from the current context"
        del self.dicts[-1][key]

    def has_key(self, key):
        for layer in self.dicts:
            if key in layer:
                return True
        return False

    def __contains__(self, key):
        return self.has_key(key)

    def get(self, key, otherwise=None):
        for layer in reversed(self.dicts):
            if key in layer:
                return layer[key]
        return otherwise
+
class Context(BaseContext):
    "A stack container for variable context"

    def __init__(self, dict_=None, autoescape=True, current_app=None):
        self.autoescape = autoescape
        self.current_app = current_app
        self.render_context = RenderContext()
        super(Context, self).__init__(dict_)

    def update(self, other_dict):
        """Push an entire mapping's keys and values onto the context stack
        (like dict.update(), but as a new scope)."""
        if not hasattr(other_dict, '__getitem__'):
            raise TypeError('other_dict must be a mapping (dictionary-like) object.')
        self.dicts.append(other_dict)
        return other_dict
+
class RenderContext(BaseContext):
    """
    A stack container for storing Template state.

    RenderContext gives template Nodes a safe place to keep state between
    invocations of their `render` method, with scoping rules suited to
    'template local' variables: the stack is pushed before each template is
    rendered, creating a fresh empty scope, and name resolution only ever
    consults the topmost scope. Variables are therefore local to a specific
    template and cannot leak into the rendering of other templates the way
    they would in the normal template context.
    """
    def __iter__(self):
        # Unlike BaseContext, only the current (topmost) scope is visible.
        for key in self.dicts[-1]:
            yield key

    def has_key(self, key):
        return key in self.dicts[-1]

    def get(self, key, otherwise=None):
        current = self.dicts[-1]
        if key in current:
            return current[key]
        return otherwise
+
+# This is a function rather than module-level procedural code because we only
+# want it to execute if somebody uses RequestContext.
+def get_standard_processors():
+ from google.appengine._internal.django.conf import settings
+ global _standard_context_processors
+ if _standard_context_processors is None:
+ processors = []
+ collect = []
+ collect.extend(_builtin_context_processors)
+ collect.extend(settings.TEMPLATE_CONTEXT_PROCESSORS)
+ for path in collect:
+ i = path.rfind('.')
+ module, attr = path[:i], path[i+1:]
+ try:
+ mod = import_module(module)
+ except ImportError, e:
+ raise ImproperlyConfigured('Error importing request processor module %s: "%s"' % (module, e))
+ try:
+ func = getattr(mod, attr)
+ except AttributeError:
+ raise ImproperlyConfigured('Module "%s" does not define a "%s" callable request processor' % (module, attr))
+ processors.append(func)
+ _standard_context_processors = tuple(processors)
+ return _standard_context_processors
+
class RequestContext(Context):
    """
    A Context that automatically populates itself by calling each processor
    in TEMPLATE_CONTEXT_PROCESSORS (plus the built-in ones) with `request`.
    Additional processors can be supplied as an iterable of callables via
    the `processors` keyword argument.
    """
    def __init__(self, request, dict=None, processors=None, current_app=None):
        Context.__init__(self, dict, current_app=current_app)
        if processors is None:
            extra_processors = ()
        else:
            extra_processors = tuple(processors)
        for processor in get_standard_processors() + extra_processors:
            self.update(processor(request))
diff --git a/google/appengine/_internal/django/template/debug.py b/google/appengine/_internal/django/template/debug.py
new file mode 100644
index 0000000..21dd411
--- /dev/null
+++ b/google/appengine/_internal/django/template/debug.py
@@ -0,0 +1,101 @@
+from google.appengine._internal.django.template import Lexer, Parser, tag_re, NodeList, VariableNode, TemplateSyntaxError
+from google.appengine._internal.django.utils.encoding import force_unicode
+from google.appengine._internal.django.utils.html import escape
+from google.appengine._internal.django.utils.safestring import SafeData, EscapeData
+from google.appengine._internal.django.utils.formats import localize
+
class DebugLexer(Lexer):
    """Lexer variant that records each token's (start, end) span in the
    template source, for error reporting."""

    def __init__(self, template_string, origin):
        super(DebugLexer, self).__init__(template_string, origin)

    def tokenize(self):
        "Return a list of tokens from a given template_string"
        tokens = []
        upto = 0
        for match in tag_re.finditer(self.template_string):
            start, end = match.span()
            if start > upto:
                # Literal text between the previous tag and this one.
                text = self.template_string[upto:start]
                tokens.append(self.create_token(text, (upto, start), False))
                upto = start
            tokens.append(self.create_token(self.template_string[start:end], (start, end), True))
            upto = end
        tail = self.template_string[upto:]
        if tail:
            tokens.append(self.create_token(tail, (upto, upto + len(tail)), False))
        return tokens

    def create_token(self, token_string, source, in_tag):
        token = super(DebugLexer, self).create_token(token_string, in_tag)
        # Attach (origin, span) so downstream errors can point at the source.
        token.source = self.origin, source
        return token
+
class DebugParser(Parser):
    """Parser variant that attaches source spans to nodes and errors."""

    def __init__(self, lexer):
        super(DebugParser, self).__init__(lexer)
        self.command_stack = []

    def enter_command(self, command, token):
        self.command_stack.append((command, token.source))

    def exit_command(self):
        self.command_stack.pop()

    def error(self, token, msg):
        return self.source_error(token.source, msg)

    def source_error(self, source, msg):
        # Build a TemplateSyntaxError annotated with its source span.
        err = TemplateSyntaxError(msg)
        err.source = source
        return err

    def create_nodelist(self):
        return DebugNodeList()

    def create_variable_node(self, contents):
        return DebugVariableNode(contents)

    def extend_nodelist(self, nodelist, node, token):
        node.source = token.source
        super(DebugParser, self).extend_nodelist(nodelist, node, token)

    def unclosed_block_tag(self, parse_until):
        # Report against the innermost open command's source span.
        command, source = self.command_stack.pop()
        msg = "Unclosed tag '%s'. Looking for one of: %s " % (command, ', '.join(parse_until))
        raise self.source_error(source, msg)

    def compile_function_error(self, token, e):
        # Annotate the exception with a source span unless it has one already.
        if not hasattr(e, 'source'):
            e.source = token.source
+
class DebugNodeList(NodeList):
    def render_node(self, node, context):
        """Render `node`, annotating any raised exception with its source span."""
        try:
            result = node.render(context)
        except TemplateSyntaxError, e:
            if not hasattr(e, 'source'):
                e.source = node.source
            raise
        except Exception, e:
            from sys import exc_info
            # Wrap arbitrary rendering errors in a TemplateSyntaxError so the
            # debug page can show where in the template they occurred, and
            # re-raise with the original traceback (Python 2 three-argument
            # raise form).
            wrapped = TemplateSyntaxError(u'Caught %s while rendering: %s' %
                (e.__class__.__name__, force_unicode(e, errors='replace')))
            wrapped.source = node.source
            wrapped.exc_info = exc_info()
            raise wrapped, None, wrapped.exc_info[2]
        return result
+
+class DebugVariableNode(VariableNode):
+ def render(self, context):
+ try:
+ output = self.filter_expression.resolve(context)
+ output = localize(output)
+ output = force_unicode(output)
+ except TemplateSyntaxError, e:
+ if not hasattr(e, 'source'):
+ e.source = self.source
+ raise
+ except UnicodeDecodeError:
+ return ''
+ if (context.autoescape and not isinstance(output, SafeData)) or isinstance(output, EscapeData):
+ return escape(output)
+ else:
+ return output
diff --git a/google/appengine/_internal/django/template/defaultfilters.py b/google/appengine/_internal/django/template/defaultfilters.py
new file mode 100644
index 0000000..b924f0f
--- /dev/null
+++ b/google/appengine/_internal/django/template/defaultfilters.py
@@ -0,0 +1,920 @@
+"""Default variable filters."""
+
+import re
+from decimal import Decimal, InvalidOperation, ROUND_HALF_UP
+import random as random_module
+try:
+ from functools import wraps
+except ImportError:
+ from google.appengine._internal.django.utils.functional import wraps # Python 2.4 fallback.
+
+from google.appengine._internal.django.template import Variable, Library
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.utils import formats
+from google.appengine._internal.django.utils.encoding import force_unicode, iri_to_uri
+from google.appengine._internal.django.utils.html import conditional_escape
+from google.appengine._internal.django.utils.safestring import mark_safe, SafeData
+from google.appengine._internal.django.utils.translation import ugettext, ungettext
+
# Library instance that collects every filter in this module via
# the register.filter(...) calls at the bottom of the file.
register = Library()
+
+#######################
+# STRING DECORATOR #
+#######################
+
def stringfilter(func):
    """
    Decorator for filters that should only ever see unicode objects: the
    first positional argument is coerced to unicode before the filter runs.
    """
    def _dec(*args, **kwargs):
        if not args:
            return func(*args, **kwargs)
        coerced = list(args)
        coerced[0] = force_unicode(coerced[0])
        # A safe input stays safe when the filter itself promises is_safe.
        if getattr(func, 'is_safe', False) and isinstance(coerced[0], SafeData):
            return mark_safe(func(*coerced, **kwargs))
        return func(*coerced, **kwargs)

    # Keep a handle on the real function so the template parser can check
    # its original argument list.
    _dec._decorated_function = getattr(func, '_decorated_function', func)
    for flag in ('is_safe', 'needs_autoescape'):
        if hasattr(func, flag):
            setattr(_dec, flag, getattr(func, flag))
    return wraps(func)(_dec)
+
+###################
+# STRINGS #
+###################
+
def addslashes(value):
    """
    Adds slashes before quotes. Useful for escaping strings in CSV, for
    example. Less useful for escaping JavaScript; use the ``escapejs``
    filter instead.
    """
    # Backslashes are doubled first so freshly-added slashes survive intact.
    result = value
    for char in ('\\', '"', "'"):
        result = result.replace(char, '\\' + char)
    return result
addslashes.is_safe = True
addslashes = stringfilter(addslashes)
+
def capfirst(value):
    """Capitalizes the first character of the value."""
    if not value:
        # Empty/falsy input is returned unchanged.
        return value
    return value[0].upper() + value[1:]
capfirst.is_safe = True
capfirst = stringfilter(capfirst)
+
def escapejs(value):
    """Hex encodes characters for use in JavaScript strings."""
    from google.appengine._internal.django.utils.html import escapejs as _escapejs
    return _escapejs(value)
escapejs = stringfilter(escapejs)
+
def fix_ampersands(value):
    """Replaces ampersands with ``&amp;`` entities."""
    from google.appengine._internal.django.utils.html import fix_ampersands as _fix
    return _fix(value)
fix_ampersands.is_safe = True
fix_ampersands = stringfilter(fix_ampersands)
+
# Values for testing floatformat input against infinity and NaN representations,
# which differ across platforms and Python versions. Some (i.e. old Windows
# ones) are not recognized by Decimal but we want to return them unchanged vs.
# returning an empty string as we do for completely invalid input. Note these
# need to be built up from values that are not inf/nan, since inf/nan values do
# not reload properly from .pyc files on Windows prior to some level of Python 2.5
# (see Python Issue757815 and Issue1080440).
pos_inf = 1e200 * 1e200
neg_inf = -1e200 * 1e200
nan = (1e200 * 1e200) / (1e200 * 1e200)
special_floats = [str(pos_inf), str(neg_inf), str(nan)]
+
def floatformat(text, arg=-1):
    """
    Displays a float to a specified number of decimal places.

    If called without an argument, it displays the floating point number with
    one decimal place -- but only if there's a decimal place to be displayed:

    * num1 = 34.23234
    * num2 = 34.00000
    * num3 = 34.26000
    * {{ num1|floatformat }} displays "34.2"
    * {{ num2|floatformat }} displays "34"
    * {{ num3|floatformat }} displays "34.3"

    If arg is positive, it will always display exactly arg number of decimal
    places:

    * {{ num1|floatformat:3 }} displays "34.232"
    * {{ num2|floatformat:3 }} displays "34.000"
    * {{ num3|floatformat:3 }} displays "34.260"

    If arg is negative, it will display arg number of decimal places -- but
    only if there are places to be displayed:

    * {{ num1|floatformat:"-3" }} displays "34.232"
    * {{ num2|floatformat:"-3" }} displays "34"
    * {{ num3|floatformat:"-3" }} displays "34.260"

    If the input float is infinity or NaN, the (platform-dependent) string
    representation of that value will be displayed.
    """

    try:
        input_val = force_unicode(text)
        d = Decimal(input_val)
    except UnicodeEncodeError:
        return u''
    except InvalidOperation:
        # Not directly parseable as Decimal: pass inf/nan reprs through
        # unchanged, otherwise retry via float() before giving up.
        if input_val in special_floats:
            return input_val
        try:
            d = Decimal(force_unicode(float(text)))
        except (ValueError, InvalidOperation, TypeError, UnicodeEncodeError):
            return u''
    try:
        # p is the requested number of decimal places (sign selects mode).
        p = int(arg)
    except ValueError:
        return input_val

    try:
        # m is the (negated) fractional part; falsy when d is a whole number.
        m = int(d) - d
    except (ValueError, OverflowError, InvalidOperation):
        return input_val

    if not m and p < 0:
        # Negative precision on a whole number: show no decimal places.
        return mark_safe(formats.number_format(u'%d' % (int(d)), 0))

    if p == 0:
        exp = Decimal(1)
    else:
        exp = Decimal('1.0') / (Decimal(10) ** abs(p))
    try:
        return mark_safe(formats.number_format(u'%s' % str(d.quantize(exp, ROUND_HALF_UP)), abs(p)))
    except InvalidOperation:
        return input_val
floatformat.is_safe = True
+
def iriencode(value):
    """Escapes an IRI value for use in a URL."""
    uri = iri_to_uri(value)
    return force_unicode(uri)
iriencode.is_safe = True
iriencode = stringfilter(iriencode)
+
def linenumbers(value, autoescape=None):
    """Displays text with line numbers."""
    from google.appengine._internal.django.utils.html import escape
    lines = value.split(u'\n')
    # Zero-pad every counter to the width of the largest line number.
    width = unicode(len(unicode(len(lines))))
    template = u"%0" + width + u"d. %s"
    if autoescape and not isinstance(value, SafeData):
        numbered = [template % (i + 1, escape(line))
                    for i, line in enumerate(lines)]
    else:
        numbered = [template % (i + 1, line)
                    for i, line in enumerate(lines)]
    return mark_safe(u'\n'.join(numbered))
linenumbers.is_safe = True
linenumbers.needs_autoescape = True
linenumbers = stringfilter(linenumbers)
+
def lower(value):
    """Converts a string into all lowercase."""
    lowered = value.lower()
    return lowered
lower.is_safe = True
lower = stringfilter(lower)
+
def make_list(value):
    """
    Returns the value turned into a list.

    For an integer, it's a list of digits.
    For a string, it's a list of characters.
    """
    # The stringfilter decorator has already coerced value to unicode,
    # so list() yields the individual characters.
    return list(value)
make_list.is_safe = False
make_list = stringfilter(make_list)
+
def slugify(value):
    """
    Normalizes string, converts to lowercase, removes non-alpha characters,
    and converts spaces to hyphens.
    """
    import unicodedata
    # Strip accents, then drop anything that isn't word/space/hyphen.
    ascii_value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
    cleaned = unicode(re.sub(r'[^\w\s-]', '', ascii_value).strip().lower())
    return mark_safe(re.sub(r'[-\s]+', '-', cleaned))
slugify.is_safe = True
slugify = stringfilter(slugify)
+
def stringformat(value, arg):
    """
    Formats the variable according to the arg, a string formatting specifier.

    This specifier uses Python string formating syntax, with the exception that
    the leading "%" is dropped.

    See http://docs.python.org/lib/typesseq-strings.html for documentation
    of Python string formatting
    """
    try:
        # Re-attach the leading "%" that the template syntax omits.
        return (u"%" + unicode(arg)) % value
    except (ValueError, TypeError):
        # Bad specifier or incompatible value: fail silently.
        return u""
stringformat.is_safe = True
+
def title(value):
    """Converts a string into titlecase."""
    # str.title() capitalizes after apostrophes ("don'T"); undo that first.
    fixed_apostrophes = re.sub(r"([a-z])'([A-Z])",
                               lambda m: m.group(0).lower(), value.title())
    # It also capitalizes letters following digits (e.g. "1St"); undo that too.
    return re.sub(r"\d([A-Z])", lambda m: m.group(0).lower(), fixed_apostrophes)
title.is_safe = True
title = stringfilter(title)
+
def truncatewords(value, arg):
    """
    Truncates a string after a certain number of words.

    Argument: Number of words to truncate after.

    Newlines within the string are removed.
    """
    from google.appengine._internal.django.utils.text import truncate_words
    try:
        word_count = int(arg)
    except ValueError:
        # Invalid literal for int(): fail silently.
        return value
    return truncate_words(value, word_count)
truncatewords.is_safe = True
truncatewords = stringfilter(truncatewords)
+
def truncatewords_html(value, arg):
    """
    Truncates HTML after a certain number of words.

    Argument: Number of words to truncate after.

    Newlines in the HTML are preserved.
    """
    from google.appengine._internal.django.utils.text import truncate_html_words
    try:
        word_count = int(arg)
    except ValueError:
        # Invalid literal for int(): fail silently.
        return value
    return truncate_html_words(value, word_count)
truncatewords_html.is_safe = True
truncatewords_html = stringfilter(truncatewords_html)
+
def upper(value):
    """Converts a string into all uppercase."""
    uppered = value.upper()
    return uppered
upper.is_safe = False
upper = stringfilter(upper)
+
def urlencode(value):
    """Escapes a value for use in a URL."""
    from google.appengine._internal.django.utils.http import urlquote
    quoted = urlquote(value)
    return quoted
urlencode.is_safe = False
urlencode = stringfilter(urlencode)
+
def urlize(value, autoescape=None):
    """Converts URLs in plain text into clickable links."""
    from google.appengine._internal.django.utils.html import urlize as html_urlize
    # rel=nofollow is always added to discourage spamming.
    return mark_safe(html_urlize(value, nofollow=True, autoescape=autoescape))
urlize.is_safe = True
urlize.needs_autoescape = True
urlize = stringfilter(urlize)
+
def urlizetrunc(value, limit, autoescape=None):
    """
    Converts URLs into clickable links, truncating URLs to the given character
    limit, and adding 'rel=nofollow' attribute to discourage spamming.

    Argument: Length to truncate URLs to.
    """
    from google.appengine._internal.django.utils.html import urlize as html_urlize
    linked = html_urlize(value, trim_url_limit=int(limit), nofollow=True,
                         autoescape=autoescape)
    return mark_safe(linked)
urlizetrunc.is_safe = True
urlizetrunc.needs_autoescape = True
urlizetrunc = stringfilter(urlizetrunc)
+
def wordcount(value):
    """Returns the number of words."""
    words = value.split()
    return len(words)
wordcount.is_safe = False
wordcount = stringfilter(wordcount)
+
def wordwrap(value, arg):
    """
    Wraps words at specified line length.

    Argument: number of characters to wrap the text at.
    """
    from google.appengine._internal.django.utils.text import wrap
    width = int(arg)
    return wrap(value, width)
wordwrap.is_safe = True
wordwrap = stringfilter(wordwrap)
+
def ljust(value, arg):
    """
    Left-aligns the value in a field of a given width.

    Argument: field size.
    """
    width = int(arg)
    return value.ljust(width)
ljust.is_safe = True
ljust = stringfilter(ljust)
+
def rjust(value, arg):
    """
    Right-aligns the value in a field of a given width.

    Argument: field size.
    """
    width = int(arg)
    return value.rjust(width)
rjust.is_safe = True
rjust = stringfilter(rjust)
+
def center(value, arg):
    """Centers the value in a field of a given width."""
    width = int(arg)
    return value.center(width)
center.is_safe = True
center = stringfilter(center)
+
def cut(value, arg):
    """
    Removes all values of arg from the given string.
    """
    was_safe = isinstance(value, SafeData)
    value = value.replace(arg, u'')
    # NOTE(review): the result stays safe unless ';' was removed --
    # presumably because stripping ';' could break escaped entities.
    if was_safe and arg != ';':
        return mark_safe(value)
    return value
cut = stringfilter(cut)
+
+###################
+# HTML STRINGS #
+###################
+
def escape(value):
    """
    Marks the value as a string whose output should be escaped (escaping is
    deferred until the string is used -- see ``mark_for_escaping``).
    """
    from google.appengine._internal.django.utils.safestring import mark_for_escaping
    return mark_for_escaping(value)
escape.is_safe = True
escape = stringfilter(escape)
+
def force_escape(value):
    """
    Escapes a string's HTML. This returns a new string containing the escaped
    characters (as opposed to "escape", which marks the content for later
    possible escaping).
    """
    from google.appengine._internal.django.utils.html import escape as html_escape
    return mark_safe(html_escape(value))
force_escape = stringfilter(force_escape)
# Set after decoration, as in the original, so the wrapped function is flagged.
force_escape.is_safe = True
+
def linebreaks(value, autoescape=None):
    """
    Replaces line breaks in plain text with appropriate HTML; a single
    newline becomes an HTML line break (``<br />``) and a new line
    followed by a blank line becomes a paragraph break (``</p>``).
    """
    from google.appengine._internal.django.utils.html import linebreaks as html_linebreaks
    # Input that is already marked safe must not be escaped again.
    should_escape = autoescape and not isinstance(value, SafeData)
    return mark_safe(html_linebreaks(value, should_escape))
linebreaks.is_safe = True
linebreaks.needs_autoescape = True
linebreaks = stringfilter(linebreaks)
+
def linebreaksbr(value, autoescape=None):
    """
    Converts all newlines in a piece of plain text to HTML line breaks
    (``<br />``).
    """
    needs_escaping = autoescape and not isinstance(value, SafeData)
    if needs_escaping:
        from google.appengine._internal.django.utils.html import escape
        value = escape(value)
    return mark_safe(value.replace('\n', '<br />'))
linebreaksbr.is_safe = True
linebreaksbr.needs_autoescape = True
linebreaksbr = stringfilter(linebreaksbr)
+
def safe(value):
    """
    Marks the value as a string that should not be auto-escaped.
    """
    # Simply wrap in SafeData so the autoescape machinery skips it.
    return mark_safe(value)
safe.is_safe = True
safe = stringfilter(safe)
+
def safeseq(value):
    """
    A "safe" filter for sequences. Marks each element in the sequence,
    individually, as safe, after converting them to unicode. Returns a list
    with the results.
    """
    return [mark_safe(force_unicode(item)) for item in value]
safeseq.is_safe = True
+
def removetags(value, tags):
    """Removes a space separated list of [X]HTML tags from the output."""
    # Build an alternation of the (escaped) tag names, e.g. u'(b|img)'.
    tag_group = u'(%s)' % u'|'.join(re.escape(tag) for tag in tags.split())
    starttag_re = re.compile(u'<%s(/?>|(\\s+[^>]*>))' % tag_group, re.U)
    endtag_re = re.compile(u'</%s>' % tag_group)
    # Strip opening tags first, then closing tags.
    value = starttag_re.sub(u'', value)
    return endtag_re.sub(u'', value)
removetags.is_safe = True
removetags = stringfilter(removetags)
+
def striptags(value):
    """Strips all [X]HTML tags."""
    from google.appengine._internal.django.utils.html import strip_tags
    stripped = strip_tags(value)
    return stripped
striptags.is_safe = True
striptags = stringfilter(striptags)
+
+###################
+# LISTS #
+###################
+
def dictsort(value, arg):
    """
    Takes a list of dicts, returns that list sorted by the property given in
    the argument.
    """
    var_resolve = Variable(arg).resolve
    # Decorate-sort-undecorate: pair each item with its resolved sort key.
    # NOTE(review): tuple sort also compares the items themselves on equal
    # keys (Python 2 allows comparing dicts) -- not the same as key=... sort.
    decorated = [(var_resolve(item), item) for item in value]
    decorated.sort()
    return [item[1] for item in decorated]
dictsort.is_safe = False
+
def dictsortreversed(value, arg):
    """
    Takes a list of dicts, returns that list sorted in reverse order by the
    property given in the argument.
    """
    var_resolve = Variable(arg).resolve
    # Decorate-sort-undecorate, then reverse the fully sorted list.
    decorated = [(var_resolve(item), item) for item in value]
    decorated.sort()
    decorated.reverse()
    return [item[1] for item in decorated]
dictsortreversed.is_safe = False
+
def first(value):
    """Returns the first item in a list."""
    try:
        return value[0]
    except IndexError:
        # Empty sequence: fail silently with an empty string.
        return u''
first.is_safe = False
+
def join(value, arg, autoescape=None):
    """
    Joins a list with a string, like Python's ``str.join(list)``.
    """
    pieces = [force_unicode(v) for v in value]
    if autoescape:
        pieces = [conditional_escape(p) for p in pieces]
    try:
        joined = conditional_escape(arg).join(pieces)
    except AttributeError:
        # Separator isn't string-like: fail silently but nicely.
        return pieces
    return mark_safe(joined)
join.is_safe = True
join.needs_autoescape = True
+
def last(value):
    """Returns the last item in a list."""
    try:
        return value[-1]
    except IndexError:
        # Empty sequence: fail silently with an empty string.
        return u''
last.is_safe = True
+
def length(value):
    """Returns the length of the value - useful for lists."""
    try:
        return len(value)
    except (ValueError, TypeError):
        # Unsized object: fail silently with an empty string.
        return ''
length.is_safe = True
+
def length_is(value, arg):
    """Returns a boolean of whether the value's length is the argument."""
    try:
        expected = int(arg)
        return len(value) == expected
    except (ValueError, TypeError):
        # Bad argument or unsized value: fail silently.
        return ''
length_is.is_safe = False
+
def random(value):
    """Returns a random item from the list."""
    # random_module is the stdlib random, aliased at import time because this
    # filter deliberately takes the name "random".
    return random_module.choice(value)
random.is_safe = True
+
def slice_(value, arg):
    """
    Returns a slice of the list.

    Uses the same syntax as Python's list slicing; see
    http://diveintopython.org/native_data_types/lists.html#odbchelper.list.slice
    for an introduction.
    """
    try:
        # Empty segments (e.g. in u':2') become None, as in real slicing.
        parts = [int(piece) if piece else None for piece in arg.split(u':')]
        return value[slice(*parts)]
    except (ValueError, TypeError):
        return value  # Fail silently.
slice_.is_safe = True
+
def unordered_list(value, autoescape=None):
    """
    Recursively takes a self-nested list and returns an HTML unordered list --
    WITHOUT opening and closing <ul> tags.

    The list is assumed to be in the proper format. For example, if ``var``
    contains: ``['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]``,
    then ``{{ var|unordered_list }}`` would return::

        <li>States
        <ul>
                <li>Kansas
                <ul>
                        <li>Lawrence</li>
                        <li>Topeka</li>
                </ul>
                </li>
                <li>Illinois</li>
        </ul>
        </li>
    """
    if autoescape:
        from google.appengine._internal.django.utils.html import conditional_escape
        escaper = conditional_escape
    else:
        # No escaping requested: identity function.
        escaper = lambda x: x
    def convert_old_style_list(list_):
        """
        Converts old style lists to the new easier to understand format.

        The old list format looked like:
            ['Item 1', [['Item 1.1', []], ['Item 1.2', []]]

        And it is converted to:
            ['Item 1', ['Item 1.1', 'Item 1.2]]
        """
        if not isinstance(list_, (tuple, list)) or len(list_) != 2:
            return list_, False
        first_item, second_item = list_
        if second_item == []:
            return [first_item], True
        try:
            it = iter(second_item)  # see if second item is iterable
        except TypeError:
            return list_, False
        old_style_list = True
        new_second_item = []
        for sublist in second_item:
            item, old_style_list = convert_old_style_list(sublist)
            if not old_style_list:
                break
            new_second_item.extend(item)
        if old_style_list:
            second_item = new_second_item
        return [first_item, second_item], old_style_list
    def _helper(list_, tabs=1):
        # Renders one nesting level; ``tabs`` controls the indentation depth.
        indent = u'\t' * tabs
        output = []

        list_length = len(list_)
        i = 0
        while i < list_length:
            title = list_[i]
            sublist = ''
            sublist_item = None
            if isinstance(title, (list, tuple)):
                sublist_item = title
                title = ''
            elif i < list_length - 1:
                next_item = list_[i+1]
                if next_item and isinstance(next_item, (list, tuple)):
                    # The next item is a sub-list.
                    sublist_item = next_item
                    # We've processed the next item now too.
                    i += 1
            if sublist_item:
                sublist = _helper(sublist_item, tabs+1)
                sublist = '\n%s<ul>\n%s\n%s</ul>\n%s' % (indent, sublist,
                                                         indent, indent)
            output.append('%s<li>%s%s</li>' % (indent,
                    escaper(force_unicode(title)), sublist))
            i += 1
        return '\n'.join(output)
    value, converted = convert_old_style_list(value)
    return mark_safe(_helper(value))
unordered_list.is_safe = True
unordered_list.needs_autoescape = True
+
+###################
+# INTEGERS #
+###################
+
def add(value, arg):
    """
    Adds the arg to the value.

    Tries integer addition first; falls back to the native ``+`` operator
    (e.g. string or list concatenation). Returns the value unchanged if
    neither works.
    """
    try:
        return int(value) + int(arg)
    except (ValueError, TypeError):
        try:
            return value + arg
        except Exception:
            # Narrowed from a bare ``except:`` so system-exiting exceptions
            # (KeyboardInterrupt, SystemExit) are no longer swallowed.
            return value
add.is_safe = False
+
def get_digit(value, arg):
    """
    Given a whole number, returns the requested digit of it, where 1 is the
    right-most digit, 2 is the second-right-most digit, etc. Returns the
    original value for invalid input (if input or argument is not an integer,
    or if argument is less than 1). Otherwise, output is always an integer.
    """
    try:
        arg = int(arg)
        value = int(value)
    except (ValueError, TypeError):
        # TypeError added: int(None) / int([]) raise TypeError, and the
        # contract above promises to return the original value for any
        # non-integer input.
        return value  # Fail silently for an invalid argument
    if arg < 1:
        return value
    try:
        return int(str(value)[-arg])
    except IndexError:
        # Asking for a digit past the left end of the number.
        return 0
get_digit.is_safe = False
+
+###################
+# DATES #
+###################
+
def date(value, arg=None):
    """Formats a date according to the given format."""
    from google.appengine._internal.django.utils.dateformat import format
    if not value:
        return u''
    if arg is None:
        arg = settings.DATE_FORMAT
    try:
        # Locale-aware formatting first; fall back to the plain formatter.
        return formats.date_format(value, arg)
    except AttributeError:
        try:
            return format(value, arg)
        except AttributeError:
            # Value doesn't behave like a date at all: fail silently.
            return ''
date.is_safe = False
+
def time(value, arg=None):
    """Formats a time according to the given format."""
    from google.appengine._internal.django.utils import dateformat
    if value in (None, u''):
        return u''
    if arg is None:
        arg = settings.TIME_FORMAT
    try:
        # Locale-aware formatting first; fall back to the plain formatter.
        return formats.time_format(value, arg)
    except AttributeError:
        try:
            return dateformat.time_format(value, arg)
        except AttributeError:
            # Value doesn't behave like a time at all: fail silently.
            return ''
time.is_safe = False
+
def timesince(value, arg=None):
    """Formats a date as the time since that date (i.e. "4 days, 6 hours")."""
    from google.appengine._internal.django.utils.timesince import timesince
    if not value:
        return u''
    try:
        # With an explicit reference date, measure from it instead of now.
        call_args = (value, arg) if arg else (value,)
        return timesince(*call_args)
    except (ValueError, TypeError):
        return u''
timesince.is_safe = False
+
def timeuntil(value, arg=None):
    """Formats a date as the time until that date (i.e. "4 days, 6 hours")."""
    from google.appengine._internal.django.utils.timesince import timeuntil as _timeuntil
    if not value:
        return u''
    try:
        return _timeuntil(value, arg)
    except (ValueError, TypeError):
        return u''
timeuntil.is_safe = False
+
+###################
+# LOGIC #
+###################
+
def default(value, arg):
    """If value is unavailable, use given default."""
    if value:
        return value
    return arg
default.is_safe = False
+
def default_if_none(value, arg):
    """If value is None, use given default."""
    return arg if value is None else value
default_if_none.is_safe = False
+
def divisibleby(value, arg):
    """Returns True if the value is divisible by the argument."""
    return not (int(value) % int(arg))
divisibleby.is_safe = False
+
def yesno(value, arg=None):
    """
    Given a string mapping values for true, false and (optionally) None,
    returns one of those strings according to the value:

    ==========  ======================  ==================================
    Value       Argument                Outputs
    ==========  ======================  ==================================
    ``True``    ``"yeah,no,maybe"``     ``yeah``
    ``False``   ``"yeah,no,maybe"``     ``no``
    ``None``    ``"yeah,no,maybe"``     ``maybe``
    ``None``    ``"yeah,no"``           ``"no"`` (converts None to False
                                        if no mapping for None is given.
    ==========  ======================  ==================================
    """
    if arg is None:
        arg = ugettext('yes,no,maybe')
    bits = arg.split(u',')
    if len(bits) < 2:
        return value  # Invalid arg.
    if len(bits) == 3:
        yes, no, maybe = bits
    else:
        # No "maybe" value provided (or too many values): None maps to "no".
        yes, no, maybe = bits[0], bits[1], bits[1]
    if value is None:
        return maybe
    return yes if value else no
yesno.is_safe = False
+
+###################
+# MISC #
+###################
+
def filesizeformat(bytes):
    """
    Formats the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB,
    102 bytes, etc).
    """
    try:
        bytes = float(bytes)
    except (TypeError, ValueError, UnicodeDecodeError):
        # Unparseable input renders as "0 bytes".
        return ungettext("%(size)d byte", "%(size)d bytes", 0) % {'size': 0}

    def _fmt(value):
        # One decimal place, locale-formatted.
        return formats.number_format(round(value, 1), 1)

    KB, MB, GB = 1024, 1024 ** 2, 1024 ** 3
    if bytes < KB:
        return ungettext("%(size)d byte", "%(size)d bytes", bytes) % {'size': bytes}
    if bytes < MB:
        return ugettext("%s KB") % _fmt(bytes / KB)
    if bytes < GB:
        return ugettext("%s MB") % _fmt(bytes / MB)
    return ugettext("%s GB") % _fmt(bytes / GB)
filesizeformat.is_safe = True
+
def pluralize(value, arg=u's'):
    """
    Returns a plural suffix if the value is not 1. By default, 's' is used as
    the suffix:

    * If value is 0, vote{{ value|pluralize }} displays "0 votes".
    * If value is 1, vote{{ value|pluralize }} displays "1 vote".
    * If value is 2, vote{{ value|pluralize }} displays "2 votes".

    If an argument is provided, that string is used instead:

    * If value is 0, class{{ value|pluralize:"es" }} displays "0 classes".
    * If value is 1, class{{ value|pluralize:"es" }} displays "1 class".
    * If value is 2, class{{ value|pluralize:"es" }} displays "2 classes".

    If the provided argument contains a comma, the text before the comma is
    used for the singular case and the text after the comma is used for the
    plural case:

    * If value is 0, cand{{ value|pluralize:"y,ies" }} displays "0 candies".
    * If value is 1, cand{{ value|pluralize:"y,ies" }} displays "1 candy".
    * If value is 2, cand{{ value|pluralize:"y,ies" }} displays "2 candies".
    """
    # Normalize a bare suffix ("s") to the "singular,plural" form ("", "s").
    if u',' not in arg:
        arg = u',' + arg
    bits = arg.split(u',')
    if len(bits) > 2:
        return u''
    singular, plural = bits[0], bits[1]

    try:
        is_plural = int(value) != 1
    except ValueError:
        # Invalid string that's not a number: treat as singular.
        return singular
    except TypeError:
        # Not a string or a number; maybe a sized object such as a list.
        try:
            is_plural = len(value) != 1
        except TypeError:
            # len() of unsized object: treat as singular.
            return singular
    return plural if is_plural else singular
pluralize.is_safe = False
+
def phone2numeric(value):
    """Takes a phone number and converts it in to its numerical equivalent."""
    from google.appengine._internal.django.utils.text import phone2numeric as _phone2numeric
    return _phone2numeric(value)
phone2numeric.is_safe = True
+
def pprint(value):
    """A wrapper around pprint.pprint -- for debugging, really."""
    from pprint import pformat
    try:
        return pformat(value)
    except Exception, e:
        # Formatting must never blow up a template render; report instead.
        return u"Error in formatting: %s" % force_unicode(e, errors="replace")
pprint.is_safe = True
+
# Syntax: register.filter(name of filter, callback)
# Every filter registers under its function name; 'slice' is named
# explicitly because the function is called slice_ to avoid shadowing the
# builtin.
register.filter(add)
register.filter(addslashes)
register.filter(capfirst)
register.filter(center)
register.filter(cut)
register.filter(date)
register.filter(default)
register.filter(default_if_none)
register.filter(dictsort)
register.filter(dictsortreversed)
register.filter(divisibleby)
register.filter(escape)
register.filter(escapejs)
register.filter(filesizeformat)
register.filter(first)
register.filter(fix_ampersands)
register.filter(floatformat)
register.filter(force_escape)
register.filter(get_digit)
register.filter(iriencode)
register.filter(join)
register.filter(last)
register.filter(length)
register.filter(length_is)
register.filter(linebreaks)
register.filter(linebreaksbr)
register.filter(linenumbers)
register.filter(ljust)
register.filter(lower)
register.filter(make_list)
register.filter(phone2numeric)
register.filter(pluralize)
register.filter(pprint)
register.filter(removetags)
register.filter(random)
register.filter(rjust)
register.filter(safe)
register.filter(safeseq)
register.filter('slice', slice_)
register.filter(slugify)
register.filter(stringformat)
register.filter(striptags)
register.filter(time)
register.filter(timesince)
register.filter(timeuntil)
register.filter(title)
register.filter(truncatewords)
register.filter(truncatewords_html)
register.filter(unordered_list)
register.filter(upper)
register.filter(urlencode)
register.filter(urlize)
register.filter(urlizetrunc)
register.filter(wordcount)
register.filter(wordwrap)
register.filter(yesno)
diff --git a/google/appengine/_internal/django/template/defaulttags.py b/google/appengine/_internal/django/template/defaulttags.py
new file mode 100644
index 0000000..e9a9d2e
--- /dev/null
+++ b/google/appengine/_internal/django/template/defaulttags.py
@@ -0,0 +1,1217 @@
+"""Default tags used by the template system, available to all templates."""
+
+import sys
+import re
+from itertools import groupby, cycle as itertools_cycle
+
+from google.appengine._internal.django.template import Node, NodeList, Template, Context, Variable
+from google.appengine._internal.django.template import TemplateSyntaxError, VariableDoesNotExist, BLOCK_TAG_START, BLOCK_TAG_END, VARIABLE_TAG_START, VARIABLE_TAG_END, SINGLE_BRACE_START, SINGLE_BRACE_END, COMMENT_TAG_START, COMMENT_TAG_END
+from google.appengine._internal.django.template import get_library, Library, InvalidTemplateLibrary
+from google.appengine._internal.django.template.smartif import IfParser, Literal
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.utils.encoding import smart_str, smart_unicode
+from google.appengine._internal.django.utils.safestring import mark_safe
+
+# Library instance that collects every tag defined in this module.
+register = Library()
+# Regex for token keyword arguments
+kwarg_re = re.compile(r"(?:(\w+)=)?(.+)")
+
+class AutoEscapeControlNode(Node):
+ """Implements the actions of the autoescape tag."""
+ def __init__(self, setting, nodelist):
+ # setting: True for {% autoescape on %}, False for off.
+ self.setting, self.nodelist = setting, nodelist
+
+ def render(self, context):
+ # Temporarily override the context's autoescape flag while the
+ # enclosed nodes render, then restore the previous value.
+ old_setting = context.autoescape
+ context.autoescape = self.setting
+ output = self.nodelist.render(context)
+ context.autoescape = old_setting
+ if self.setting:
+ # Escaping already happened while rendering the block, so mark the
+ # result safe to prevent a second escape by an outer context.
+ return mark_safe(output)
+ else:
+ return output
+
+class CommentNode(Node):
+ """Renders nothing; backs the {% comment %} tag."""
+ def render(self, context):
+ return ''
+
+class CsrfTokenNode(Node):
+ """Emits a hidden form input carrying the CSRF token from the context."""
+ def render(self, context):
+ csrf_token = context.get('csrf_token', None)
+ if csrf_token:
+ if csrf_token == 'NOTPROVIDED':
+ # Sentinel value: CSRF protection deliberately not in effect,
+ # so emit nothing.
+ return mark_safe(u"")
+ else:
+ return mark_safe(u"<div style='display:none'><input type='hidden' name='csrfmiddlewaretoken' value='%s' /></div>" % csrf_token)
+ else:
+ # It's very probable that the token is missing because of
+ # misconfiguration, so we raise a warning
+ from google.appengine._internal.django.conf import settings
+ if settings.DEBUG:
+ import warnings
+ warnings.warn("A {% csrf_token %} was used in a template, but the context did not provide the value. This is usually caused by not using RequestContext.")
+ return u''
+
+class CycleNode(Node):
+ """Cycles through a list of values, advancing one step per render call."""
+ def __init__(self, cyclevars, variable_name=None):
+ self.cyclevars = cyclevars
+ self.variable_name = variable_name
+
+ def render(self, context):
+ # The iterator is stored in render_context keyed by this node, so
+ # separate renders of the same compiled template don't share state.
+ if self not in context.render_context:
+ context.render_context[self] = itertools_cycle(self.cyclevars)
+ cycle_iter = context.render_context[self]
+ value = cycle_iter.next().resolve(context)
+ if self.variable_name:
+ # {% cycle ... as name %}: also expose the current value by name.
+ context[self.variable_name] = value
+ return value
+
+class DebugNode(Node):
+ """Dumps the current context dicts and sys.modules, pretty-printed."""
+ def render(self, context):
+ from pprint import pformat
+ # Iterating a template Context yields its stacked dicts.
+ output = [pformat(val) for val in context]
+ output.append('\n\n')
+ output.append(pformat(sys.modules))
+ return ''.join(output)
+
+class FilterNode(Node):
+ """Renders its block, then pipes the output through a filter expression."""
+ def __init__(self, filter_expr, nodelist):
+ self.filter_expr, self.nodelist = filter_expr, nodelist
+
+ def render(self, context):
+ output = self.nodelist.render(context)
+ # Apply filters.
+ # The block output is pushed into the context under the name 'var'
+ # so the precompiled "var|..." expression can resolve it.
+ context.update({'var': output})
+ filtered = self.filter_expr.resolve(context)
+ context.pop()
+ return filtered
+
+class FirstOfNode(Node):
+ """Outputs the first truthy value among its variables, without escaping."""
+ def __init__(self, vars):
+ self.vars = vars
+
+ def render(self, context):
+ for var in self.vars:
+ # Second argument is ignore_failures: unresolvable variables do
+ # not raise here.
+ value = var.resolve(context, True)
+ if value:
+ return smart_unicode(value)
+ return u''
+
+class ForNode(Node):
+ """Implements {% for %}: loops over a sequence with an optional
+ {% empty %} branch and the forloop.* convenience variables."""
+ child_nodelists = ('nodelist_loop', 'nodelist_empty')
+
+ def __init__(self, loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty=None):
+ self.loopvars, self.sequence = loopvars, sequence
+ self.is_reversed = is_reversed
+ self.nodelist_loop = nodelist_loop
+ if nodelist_empty is None:
+ self.nodelist_empty = NodeList()
+ else:
+ self.nodelist_empty = nodelist_empty
+
+ def __repr__(self):
+ reversed_text = self.is_reversed and ' reversed' or ''
+ return "<For Node: for %s in %s, tail_len: %d%s>" % (', '.join(self.loopvars), self.sequence, len(self.nodelist_loop),
+ reversed_text)
+
+ def __iter__(self):
+ # Yield children from both branches so tree walks see every node.
+ for node in self.nodelist_loop:
+ yield node
+ for node in self.nodelist_empty:
+ yield node
+
+ def render(self, context):
+ if 'forloop' in context:
+ # Nested loop: keep the outer state for forloop.parentloop.
+ parentloop = context['forloop']
+ else:
+ parentloop = {}
+ context.push()
+ try:
+ values = self.sequence.resolve(context, True)
+ except VariableDoesNotExist:
+ values = []
+ if values is None:
+ values = []
+ if not hasattr(values, '__len__'):
+ # Materialize iterators/generators; the counters below need len().
+ values = list(values)
+ len_values = len(values)
+ if len_values < 1:
+ context.pop()
+ return self.nodelist_empty.render(context)
+ nodelist = NodeList()
+ if self.is_reversed:
+ values = reversed(values)
+ unpack = len(self.loopvars) > 1
+ # Create a forloop value in the context. We'll update counters on each
+ # iteration just below.
+ loop_dict = context['forloop'] = {'parentloop': parentloop}
+ for i, item in enumerate(values):
+ # Shortcuts for current loop iteration number.
+ loop_dict['counter0'] = i
+ loop_dict['counter'] = i+1
+ # Reverse counter iteration numbers.
+ loop_dict['revcounter'] = len_values - i
+ loop_dict['revcounter0'] = len_values - i - 1
+ # Boolean values designating first and last times through loop.
+ loop_dict['first'] = (i == 0)
+ loop_dict['last'] = (i == len_values - 1)
+
+ pop_context = False
+ if unpack:
+ # If there are multiple loop variables, unpack the item into
+ # them.
+ try:
+ unpacked_vars = dict(zip(self.loopvars, item))
+ except TypeError:
+ # Item isn't unpackable; silently skip the assignment.
+ pass
+ else:
+ pop_context = True
+ context.update(unpacked_vars)
+ else:
+ context[self.loopvars[0]] = item
+ for node in self.nodelist_loop:
+ nodelist.append(node.render(context))
+ if pop_context:
+ # The loop variables were pushed on to the context so pop them
+ # off again. This is necessary because the tag lets the length
+ # of loopvars differ to the length of each set of items and we
+ # don't want to leave any vars from the previous loop on the
+ # context.
+ context.pop()
+ context.pop()
+ return nodelist.render(context)
+
+class IfChangedNode(Node):
+ """Renders its block only when the watched value(s) differ from the
+ previous iteration; backs the {% ifchanged %} tag."""
+ child_nodelists = ('nodelist_true', 'nodelist_false')
+
+ def __init__(self, nodelist_true, nodelist_false, *varlist):
+ self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
+ # NOTE(review): last-seen state is stored on the node itself, so it
+ # appears to be shared across renders of the same compiled template —
+ # confirm whether concurrent renders are a concern here.
+ self._last_seen = None
+ self._varlist = varlist
+ self._id = str(id(self))
+
+ def render(self, context):
+ # Reset the state at the start of each enclosing for-loop run; the
+ # marker stored under self._id in context['forloop'] detects this.
+ if 'forloop' in context and self._id not in context['forloop']:
+ self._last_seen = None
+ context['forloop'][self._id] = 1
+ try:
+ if self._varlist:
+ # Consider multiple parameters. This automatically behaves
+ # like an OR evaluation of the multiple variables.
+ compare_to = [var.resolve(context, True) for var in self._varlist]
+ else:
+ # No explicit variables: compare the rendered block contents.
+ compare_to = self.nodelist_true.render(context)
+ except VariableDoesNotExist:
+ compare_to = None
+
+ if compare_to != self._last_seen:
+ firstloop = (self._last_seen == None)
+ self._last_seen = compare_to
+ content = self.nodelist_true.render(context)
+ return content
+ elif self.nodelist_false:
+ return self.nodelist_false.render(context)
+ return ''
+
+class IfEqualNode(Node):
+ """Backs {% ifequal %} and {% ifnotequal %}; negate flips the test."""
+ child_nodelists = ('nodelist_true', 'nodelist_false')
+
+ def __init__(self, var1, var2, nodelist_true, nodelist_false, negate):
+ self.var1, self.var2 = var1, var2
+ self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
+ self.negate = negate
+
+ def __repr__(self):
+ return "<IfEqualNode>"
+
+ def render(self, context):
+ # Second argument is ignore_failures: unresolved vars compare as None.
+ val1 = self.var1.resolve(context, True)
+ val2 = self.var2.resolve(context, True)
+ if (self.negate and val1 != val2) or (not self.negate and val1 == val2):
+ return self.nodelist_true.render(context)
+ return self.nodelist_false.render(context)
+
+class IfNode(Node):
+ """Backs {% if %}: renders the true or false branch depending on the
+ truthiness of a compiled smartif expression."""
+ child_nodelists = ('nodelist_true', 'nodelist_false')
+
+ def __init__(self, var, nodelist_true, nodelist_false=None):
+ self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
+ self.var = var
+
+ def __repr__(self):
+ return "<If node>"
+
+ def __iter__(self):
+ # Yield children from both branches so tree walks see every node.
+ for node in self.nodelist_true:
+ yield node
+ for node in self.nodelist_false:
+ yield node
+
+ def render(self, context):
+ try:
+ # self.var is a smartif expression tree (see TemplateIfParser),
+ # evaluated with eval() rather than resolve().
+ var = self.var.eval(context)
+ except VariableDoesNotExist:
+ var = None
+
+ if var:
+ return self.nodelist_true.render(context)
+ else:
+ return self.nodelist_false.render(context)
+
+class RegroupNode(Node):
+ """Backs {% regroup %}: groups a pre-sorted list by an expression and
+ stores the grouped result under var_name in the context."""
+ def __init__(self, target, expression, var_name):
+ self.target, self.expression = target, expression
+ self.var_name = var_name
+
+ def render(self, context):
+ obj_list = self.target.resolve(context, True)
+ if obj_list == None:
+ # target variable wasn't found in context; fail silently.
+ context[self.var_name] = []
+ return ''
+ # List of dictionaries in the format:
+ # {'grouper': 'key', 'list': [list of contents]}.
+ # The default-argument trick binds expression.resolve once per lambda;
+ # groupby only merges adjacent equal keys, hence the documented
+ # requirement that the input already be sorted by the grouping key.
+ context[self.var_name] = [
+ {'grouper': key, 'list': list(val)}
+ for key, val in
+ groupby(obj_list, lambda v, f=self.expression.resolve: f(v, True))
+ ]
+ return ''
+
+def include_is_allowed(filepath):
+ """Return True if filepath starts with one of the configured
+ ALLOWED_INCLUDE_ROOTS prefixes; used by {% ssi %} to restrict which
+ files may be included."""
+ for root in settings.ALLOWED_INCLUDE_ROOTS:
+ if filepath.startswith(root):
+ return True
+ return False
+
+class SsiNode(Node):
+ """Backs {% ssi %}: inlines a file's contents, optionally parsed as a
+ template against the current context."""
+ def __init__(self, filepath, parsed):
+ self.filepath, self.parsed = filepath, parsed
+
+ def render(self, context):
+ if not include_is_allowed(self.filepath):
+ if settings.DEBUG:
+ return "[Didn't have permission to include file]"
+ else:
+ return '' # Fail silently for invalid includes.
+ try:
+ fp = open(self.filepath, 'r')
+ output = fp.read()
+ fp.close()
+ except IOError:
+ # Unreadable file: render as empty rather than erroring the page.
+ output = ''
+ if self.parsed:
+ try:
+ t = Template(output, name=self.filepath)
+ return t.render(context)
+ except TemplateSyntaxError, e:
+ if settings.DEBUG:
+ return "[Included template had syntax error: %s]" % e
+ else:
+ return '' # Fail silently for invalid included templates.
+ return output
+
+class LoadNode(Node):
+ """Renders nothing; {% load %} does its work at parse time."""
+ def render(self, context):
+ return ''
+
+class NowNode(Node):
+ """Backs {% now %}: formats the current datetime with a PHP-style
+ format string via DateFormat."""
+ def __init__(self, format_string):
+ self.format_string = format_string
+
+ def render(self, context):
+ from datetime import datetime
+ from google.appengine._internal.django.utils.dateformat import DateFormat
+ df = DateFormat(datetime.now())
+ return df.format(self.format_string)
+
+class SpacelessNode(Node):
+ """Backs {% spaceless %}: strips whitespace between HTML tags in the
+ rendered block output."""
+ def __init__(self, nodelist):
+ self.nodelist = nodelist
+
+ def render(self, context):
+ from google.appengine._internal.django.utils.html import strip_spaces_between_tags
+ return strip_spaces_between_tags(self.nodelist.render(context).strip())
+
+class TemplateTagNode(Node):
+ """Backs {% templatetag %}: outputs one of the literal template-syntax
+ markers (e.g. '{%', '}}') by keyword."""
+ # Maps the tag's argument keyword to the literal marker to emit.
+ mapping = {'openblock': BLOCK_TAG_START,
+ 'closeblock': BLOCK_TAG_END,
+ 'openvariable': VARIABLE_TAG_START,
+ 'closevariable': VARIABLE_TAG_END,
+ 'openbrace': SINGLE_BRACE_START,
+ 'closebrace': SINGLE_BRACE_END,
+ 'opencomment': COMMENT_TAG_START,
+ 'closecomment': COMMENT_TAG_END,
+ }
+
+ def __init__(self, tagtype):
+ self.tagtype = tagtype
+
+ def render(self, context):
+ # Unknown tagtype renders as empty (parse-time validation normally
+ # prevents this case).
+ return self.mapping.get(self.tagtype, '')
+
+class URLNode(Node):
+ """Backs {% url %}: reverses a view name (with args/kwargs) into a URL,
+ optionally storing it in the context via the 'as var' form."""
+ def __init__(self, view_name, args, kwargs, asvar):
+ self.view_name = view_name
+ self.args = args
+ self.kwargs = kwargs
+ self.asvar = asvar
+
+ def render(self, context):
+ from google.appengine._internal.django.core.urlresolvers import reverse, NoReverseMatch
+ args = [arg.resolve(context) for arg in self.args]
+ # Keyword names must be plain ascii strs to be usable as **kwargs.
+ kwargs = dict([(smart_str(k,'ascii'), v.resolve(context))
+ for k, v in self.kwargs.items()])
+
+ # Try to look up the URL twice: once given the view name, and again
+ # relative to what we guess is the "main" app. If they both fail,
+ # re-raise the NoReverseMatch unless we're using the
+ # {% url ... as var %} construct in which case return nothing.
+ url = ''
+ try:
+ url = reverse(self.view_name, args=args, kwargs=kwargs, current_app=context.current_app)
+ except NoReverseMatch, e:
+ if settings.SETTINGS_MODULE:
+ # Retry with the project package prefixed to the view name.
+ project_name = settings.SETTINGS_MODULE.split('.')[0]
+ try:
+ url = reverse(project_name + '.' + self.view_name,
+ args=args, kwargs=kwargs, current_app=context.current_app)
+ except NoReverseMatch:
+ if self.asvar is None:
+ # Re-raise the original exception, not the one with
+ # the path relative to the project. This makes a
+ # better error message.
+ raise e
+ else:
+ if self.asvar is None:
+ raise e
+
+ if self.asvar:
+ # 'as var' form: store the URL (possibly '') and output nothing.
+ context[self.asvar] = url
+ return ''
+ else:
+ return url
+
+class WidthRatioNode(Node):
+ """Backs {% widthratio %}: scales value/maxvalue to max_width and
+ returns the rounded integer result as a string."""
+ def __init__(self, val_expr, max_expr, max_width):
+ self.val_expr = val_expr
+ self.max_expr = max_expr
+ self.max_width = max_width
+
+ def render(self, context):
+ try:
+ value = self.val_expr.resolve(context)
+ maxvalue = self.max_expr.resolve(context)
+ max_width = int(self.max_width.resolve(context))
+ except VariableDoesNotExist:
+ return ''
+ except ValueError:
+ # NOTE(review): "an number" is a typo in this user-visible message;
+ # left untouched here since it is runtime output, not a comment.
+ raise TemplateSyntaxError("widthratio final argument must be an number")
+ try:
+ value = float(value)
+ maxvalue = float(maxvalue)
+ ratio = (value / maxvalue) * max_width
+ except (ValueError, ZeroDivisionError):
+ # Non-numeric values or a zero maximum render as empty output.
+ return ''
+ return str(int(round(ratio)))
+
+class WithNode(Node):
+ """Backs {% with %}: renders its block with a value bound to a name on
+ a temporary context layer."""
+ def __init__(self, var, name, nodelist):
+ self.var = var
+ self.name = name
+ self.nodelist = nodelist
+
+ def __repr__(self):
+ return "<WithNode>"
+
+ def render(self, context):
+ val = self.var.resolve(context)
+ # Push a fresh layer so the binding disappears after the block.
+ context.push()
+ context[self.name] = val
+ output = self.nodelist.render(context)
+ context.pop()
+ return output
+
+#@register.tag
+def autoescape(parser, token):
+ """
+ Force autoescape behaviour for this block.
+
+ Usage: {% autoescape on|off %} ... {% endautoescape %}
+ """
+ args = token.contents.split()
+ if len(args) != 2:
+ raise TemplateSyntaxError("'autoescape' tag requires exactly one argument.")
+ arg = args[1]
+ if arg not in (u'on', u'off'):
+ raise TemplateSyntaxError("'autoescape' argument should be 'on' or 'off'")
+ nodelist = parser.parse(('endautoescape',))
+ # Consume the endautoescape token that parse() stopped at.
+ parser.delete_first_token()
+ return AutoEscapeControlNode((arg == 'on'), nodelist)
+autoescape = register.tag(autoescape)
+
+#@register.tag
+def comment(parser, token):
+ """
+ Ignores everything between ``{% comment %}`` and ``{% endcomment %}``.
+ """
+ # skip_past discards tokens without building nodes, so the contents are
+ # never parsed.
+ parser.skip_past('endcomment')
+ return CommentNode()
+comment = register.tag(comment)
+
+#@register.tag
+def cycle(parser, token):
+ """
+ Cycles among the given strings each time this tag is encountered.
+
+ Within a loop, cycles among the given strings each time through
+ the loop::
+
+ {% for o in some_list %}
+ <tr class="{% cycle 'row1' 'row2' %}">
+ ...
+ </tr>
+ {% endfor %}
+
+ Outside of a loop, give the values a unique name the first time you call
+ it, then use that name each successive time through::
+
+ <tr class="{% cycle 'row1' 'row2' 'row3' as rowcolors %}">...</tr>
+ <tr class="{% cycle rowcolors %}">...</tr>
+ <tr class="{% cycle rowcolors %}">...</tr>
+
+ You can use any number of values, separated by spaces. Commas can also
+ be used to separate values; if a comma is used, the cycle values are
+ interpreted as literal strings.
+ """
+
+ # Note: This returns the exact same node on each {% cycle name %} call;
+ # that is, the node object returned from {% cycle a b c as name %} and the
+ # one returned from {% cycle name %} are the exact same object. This
+ # shouldn't cause problems (heh), but if it does, now you know.
+ #
+ # Ugly hack warning: This stuffs the named template dict into parser so
+ # that names are only unique within each template (as opposed to using
+ # a global variable, which would make cycle names have to be unique across
+ # *all* templates.
+
+ args = token.split_contents()
+
+ if len(args) < 2:
+ raise TemplateSyntaxError("'cycle' tag requires at least two arguments")
+
+ if ',' in args[1]:
+ # Backwards compatibility: {% cycle a,b %} or {% cycle a,b as foo %}
+ # case.
+ args[1:2] = ['"%s"' % arg for arg in args[1].split(",")]
+
+ if len(args) == 2:
+ # {% cycle foo %} case.
+ name = args[1]
+ if not hasattr(parser, '_namedCycleNodes'):
+ raise TemplateSyntaxError("No named cycles in template. '%s' is not defined" % name)
+ if not name in parser._namedCycleNodes:
+ raise TemplateSyntaxError("Named cycle '%s' does not exist" % name)
+ # Reuse the node created by the earlier {% cycle ... as name %}.
+ return parser._namedCycleNodes[name]
+
+ if len(args) > 4 and args[-2] == 'as':
+ # {% cycle a b ... as name %}: create and remember a named node.
+ name = args[-1]
+ values = [parser.compile_filter(arg) for arg in args[1:-2]]
+ node = CycleNode(values, name)
+ if not hasattr(parser, '_namedCycleNodes'):
+ parser._namedCycleNodes = {}
+ parser._namedCycleNodes[name] = node
+ else:
+ values = [parser.compile_filter(arg) for arg in args[1:]]
+ node = CycleNode(values)
+ return node
+cycle = register.tag(cycle)
+
+def csrf_token(parser, token):
+ """Parser for {% csrf_token %}; takes no arguments."""
+ return CsrfTokenNode()
+register.tag(csrf_token)
+
+def debug(parser, token):
+ """
+ Outputs a whole load of debugging information, including the current
+ context and imported modules.
+
+ Sample usage::
+
+ <pre>
+ {% debug %}
+ </pre>
+ """
+ return DebugNode()
+debug = register.tag(debug)
+
+#@register.tag(name="filter")
+def do_filter(parser, token):
+ """
+ Filters the contents of the block through variable filters.
+
+ Filters can also be piped through each other, and they can have
+ arguments -- just like in variable syntax.
+
+ Sample usage::
+
+ {% filter force_escape|lower %}
+ This text will be HTML-escaped, and will appear in lowercase.
+ {% endfilter %}
+ """
+ _, rest = token.contents.split(None, 1)
+ # Compile "var|<filters>" so FilterNode can feed the block output in
+ # under the name 'var'.
+ filter_expr = parser.compile_filter("var|%s" % (rest))
+ for func, unused in filter_expr.filters:
+ # escape/safe inside {% filter %} would subvert autoescaping, so they
+ # are rejected at parse time.
+ if getattr(func, '_decorated_function', func).__name__ in ('escape', 'safe'):
+ raise TemplateSyntaxError('"filter %s" is not permitted. Use the "autoescape" tag instead.' % func.__name__)
+ nodelist = parser.parse(('endfilter',))
+ parser.delete_first_token()
+ return FilterNode(filter_expr, nodelist)
+do_filter = register.tag("filter", do_filter)
+
+#@register.tag
+def firstof(parser, token):
+ """
+ Outputs the first variable passed that is not False, without escaping.
+
+ Outputs nothing if all the passed variables are False.
+
+ Sample usage::
+
+ {% firstof var1 var2 var3 %}
+
+ This is equivalent to::
+
+ {% if var1 %}
+ {{ var1|safe }}
+ {% else %}{% if var2 %}
+ {{ var2|safe }}
+ {% else %}{% if var3 %}
+ {{ var3|safe }}
+ {% endif %}{% endif %}{% endif %}
+
+ but obviously much cleaner!
+
+ You can also use a literal string as a fallback value in case all
+ passed variables are False::
+
+ {% firstof var1 var2 var3 "fallback value" %}
+
+ If you want to escape the output, use a filter tag::
+
+ {% filter force_escape %}
+ {% firstof var1 var2 var3 "fallback value" %}
+ {% endfilter %}
+
+ """
+ bits = token.split_contents()[1:]
+ if len(bits) < 1:
+ raise TemplateSyntaxError("'firstof' statement requires at least one argument")
+ return FirstOfNode([parser.compile_filter(bit) for bit in bits])
+firstof = register.tag(firstof)
+
+#@register.tag(name="for")
+def do_for(parser, token):
+ """
+ Loops over each item in an array.
+
+ For example, to display a list of athletes given ``athlete_list``::
+
+ <ul>
+ {% for athlete in athlete_list %}
+ <li>{{ athlete.name }}</li>
+ {% endfor %}
+ </ul>
+
+ You can loop over a list in reverse by using
+ ``{% for obj in list reversed %}``.
+
+ You can also unpack multiple values from a two-dimensional array::
+
+ {% for key,value in dict.items %}
+ {{ key }}: {{ value }}
+ {% endfor %}
+
+ The ``for`` tag can take an optional ``{% empty %}`` clause that will
+ be displayed if the given array is empty or could not be found::
+
+ <ul>
+ {% for athlete in athlete_list %}
+ <li>{{ athlete.name }}</li>
+ {% empty %}
+ <li>Sorry, no athletes in this list.</li>
+ {% endfor %}
+ <ul>
+
+ The above is equivalent to -- but shorter, cleaner, and possibly faster
+ than -- the following::
+
+ <ul>
+ {% if athlete_list %}
+ {% for athlete in athlete_list %}
+ <li>{{ athlete.name }}</li>
+ {% endfor %}
+ {% else %}
+ <li>Sorry, no athletes in this list.</li>
+ {% endif %}
+ </ul>
+
+ The for loop sets a number of variables available within the loop:
+
+ ========================== ================================================
+ Variable Description
+ ========================== ================================================
+ ``forloop.counter`` The current iteration of the loop (1-indexed)
+ ``forloop.counter0`` The current iteration of the loop (0-indexed)
+ ``forloop.revcounter`` The number of iterations from the end of the
+ loop (1-indexed)
+ ``forloop.revcounter0`` The number of iterations from the end of the
+ loop (0-indexed)
+ ``forloop.first`` True if this is the first time through the loop
+ ``forloop.last`` True if this is the last time through the loop
+ ``forloop.parentloop`` For nested loops, this is the loop "above" the
+ current one
+ ========================== ================================================
+
+ """
+ bits = token.contents.split()
+ if len(bits) < 4:
+ raise TemplateSyntaxError("'for' statements should have at least four"
+ " words: %s" % token.contents)
+
+ is_reversed = bits[-1] == 'reversed'
+ # 'in' sits one token earlier when a trailing 'reversed' is present.
+ in_index = is_reversed and -3 or -2
+ if bits[in_index] != 'in':
+ raise TemplateSyntaxError("'for' statements should use the format"
+ " 'for x in y': %s" % token.contents)
+
+ # Normalize "a, b" / "a ,b" / "a,b" into a clean comma-separated list
+ # of loop variable names.
+ loopvars = re.sub(r' *, *', ',', ' '.join(bits[1:in_index])).split(',')
+ for var in loopvars:
+ if not var or ' ' in var:
+ raise TemplateSyntaxError("'for' tag received an invalid argument:"
+ " %s" % token.contents)
+
+ sequence = parser.compile_filter(bits[in_index+1])
+ nodelist_loop = parser.parse(('empty', 'endfor',))
+ token = parser.next_token()
+ if token.contents == 'empty':
+ nodelist_empty = parser.parse(('endfor',))
+ parser.delete_first_token()
+ else:
+ nodelist_empty = None
+ return ForNode(loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty)
+do_for = register.tag("for", do_for)
+
+def do_ifequal(parser, token, negate):
+ """Shared parser for {% ifequal %} and {% ifnotequal %}; negate selects
+ which comparison the resulting IfEqualNode performs."""
+ bits = list(token.split_contents())
+ if len(bits) != 3:
+ raise TemplateSyntaxError("%r takes two arguments" % bits[0])
+ # bits[0] is the tag name, so the closing tag is e.g. 'endifequal'.
+ end_tag = 'end' + bits[0]
+ nodelist_true = parser.parse(('else', end_tag))
+ token = parser.next_token()
+ if token.contents == 'else':
+ nodelist_false = parser.parse((end_tag,))
+ parser.delete_first_token()
+ else:
+ nodelist_false = NodeList()
+ val1 = parser.compile_filter(bits[1])
+ val2 = parser.compile_filter(bits[2])
+ return IfEqualNode(val1, val2, nodelist_true, nodelist_false, negate)
+
+#@register.tag
+def ifequal(parser, token):
+ """
+ Outputs the contents of the block if the two arguments equal each other.
+
+ Examples::
+
+ {% ifequal user.id comment.user_id %}
+ ...
+ {% endifequal %}
+
+ {% ifnotequal user.id comment.user_id %}
+ ...
+ {% else %}
+ ...
+ {% endifnotequal %}
+ """
+ return do_ifequal(parser, token, False)
+ifequal = register.tag(ifequal)
+
+#@register.tag
+def ifnotequal(parser, token):
+ """
+ Outputs the contents of the block if the two arguments are not equal.
+ See ifequal.
+ """
+ return do_ifequal(parser, token, True)
+ifnotequal = register.tag(ifnotequal)
+
+class TemplateLiteral(Literal):
+ """Adapts a compiled filter expression to the smartif Literal interface
+ so it can appear as an operand in {% if %} expressions."""
+ def __init__(self, value, text):
+ self.value = value
+ self.text = text # for better error messages
+
+ def display(self):
+ return self.text
+
+ def eval(self, context):
+ # ignore_failures=True: an unresolvable variable evaluates rather
+ # than raising inside the if-expression.
+ return self.value.resolve(context, ignore_failures=True)
+
+class TemplateIfParser(IfParser):
+ """IfParser subclass that compiles operands through the template parser
+ and reports errors as TemplateSyntaxError."""
+ error_class = TemplateSyntaxError
+
+ def __init__(self, parser, *args, **kwargs):
+ # Keep the template parser around so create_var can compile filters.
+ self.template_parser = parser
+ return super(TemplateIfParser, self).__init__(*args, **kwargs)
+
+ def create_var(self, value):
+ return TemplateLiteral(self.template_parser.compile_filter(value), value)
+
+#@register.tag(name="if")
+def do_if(parser, token):
+ """
+ The ``{% if %}`` tag evaluates a variable, and if that variable is "true"
+ (i.e., exists, is not empty, and is not a false boolean value), the
+ contents of the block are output:
+
+ ::
+
+ {% if athlete_list %}
+ Number of athletes: {{ athlete_list|count }}
+ {% else %}
+ No athletes.
+ {% endif %}
+
+ In the above, if ``athlete_list`` is not empty, the number of athletes will
+ be displayed by the ``{{ athlete_list|count }}`` variable.
+
+ As you can see, the ``if`` tag can take an optional ``{% else %}`` clause
+ that will be displayed if the test fails.
+
+ ``if`` tags may use ``or``, ``and`` or ``not`` to test a number of
+ variables or to negate a given variable::
+
+ {% if not athlete_list %}
+ There are no athletes.
+ {% endif %}
+
+ {% if athlete_list or coach_list %}
+ There are some athletes or some coaches.
+ {% endif %}
+
+ {% if athlete_list and coach_list %}
+ Both athletes and coaches are available.
+ {% endif %}
+
+ {% if not athlete_list or coach_list %}
+ There are no athletes, or there are some coaches.
+ {% endif %}
+
+ {% if athlete_list and not coach_list %}
+ There are some athletes and absolutely no coaches.
+ {% endif %}
+
+ Comparison operators are also available, and the use of filters is also
+ allowed, for example:
+
+ {% if articles|length >= 5 %}...{% endif %}
+
+ Arguments and operators _must_ have a space between them, so
+ ``{% if 1>2 %}`` is not a valid if tag.
+
+ All supported operators are: ``or``, ``and``, ``in``, ``not in``
+ ``==`` (or ``=``), ``!=``, ``>``, ``>=``, ``<`` and ``<=``.
+
+ Operator precedence follows Python.
+ """
+ bits = token.split_contents()[1:]
+ # Build the smartif expression tree from everything after the tag name.
+ var = TemplateIfParser(parser, bits).parse()
+ nodelist_true = parser.parse(('else', 'endif'))
+ token = parser.next_token()
+ if token.contents == 'else':
+ nodelist_false = parser.parse(('endif',))
+ parser.delete_first_token()
+ else:
+ nodelist_false = NodeList()
+ return IfNode(var, nodelist_true, nodelist_false)
+do_if = register.tag("if", do_if)
+
+#@register.tag
+def ifchanged(parser, token):
+ """
+ Checks if a value has changed from the last iteration of a loop.
+
+ The 'ifchanged' block tag is used within a loop. It has two possible uses.
+
+ 1. Checks its own rendered contents against its previous state and only
+ displays the content if it has changed. For example, this displays a
+ list of days, only displaying the month if it changes::
+
+ <h1>Archive for {{ year }}</h1>
+
+ {% for date in days %}
+ {% ifchanged %}<h3>{{ date|date:"F" }}</h3>{% endifchanged %}
+ <a href="{{ date|date:"M/d"|lower }}/">{{ date|date:"j" }}</a>
+ {% endfor %}
+
+ 2. If given a variable, check whether that variable has changed.
+ For example, the following shows the date every time it changes, but
+ only shows the hour if both the hour and the date have changed::
+
+ {% for date in days %}
+ {% ifchanged date.date %} {{ date.date }} {% endifchanged %}
+ {% ifchanged date.hour date.date %}
+ {{ date.hour }}
+ {% endifchanged %}
+ {% endfor %}
+ """
+ bits = token.contents.split()
+ nodelist_true = parser.parse(('else', 'endifchanged'))
+ token = parser.next_token()
+ if token.contents == 'else':
+ nodelist_false = parser.parse(('endifchanged',))
+ parser.delete_first_token()
+ else:
+ nodelist_false = NodeList()
+ # Any arguments after the tag name become the watched variables.
+ values = [parser.compile_filter(bit) for bit in bits[1:]]
+ return IfChangedNode(nodelist_true, nodelist_false, *values)
+ifchanged = register.tag(ifchanged)
+
+#@register.tag
+def ssi(parser, token):
+ """
+ Outputs the contents of a given file into the page.
+
+ Like a simple "include" tag, the ``ssi`` tag includes the contents
+ of another file -- which must be specified using an absolute path --
+ in the current page::
+
+ {% ssi /home/html/ljworld.com/includes/right_generic.html %}
+
+ If the optional "parsed" parameter is given, the contents of the included
+ file are evaluated as template code, with the current context::
+
+ {% ssi /home/html/ljworld.com/includes/right_generic.html parsed %}
+ """
+ bits = token.contents.split()
+ parsed = False
+ if len(bits) not in (2, 3):
+ raise TemplateSyntaxError("'ssi' tag takes one argument: the path to"
+ " the file to be included")
+ if len(bits) == 3:
+ if bits[2] == 'parsed':
+ parsed = True
+ else:
+ raise TemplateSyntaxError("Second (optional) argument to %s tag"
+ " must be 'parsed'" % bits[0])
+ return SsiNode(bits[1], parsed)
+ssi = register.tag(ssi)
+
+#@register.tag
+def load(parser, token):
+ """
+ Loads a custom template tag set.
+
+ For example, to load the template tags in
+ ``django/templatetags/news/photos.py``::
+
+ {% load news.photos %}
+ """
+ bits = token.contents.split()
+ for taglib in bits[1:]:
+ # add the library to the parser
+ try:
+ lib = get_library(taglib)
+ parser.add_library(lib)
+ except InvalidTemplateLibrary, e:
+ raise TemplateSyntaxError("'%s' is not a valid tag library: %s" %
+ (taglib, e))
+ # All work happens at parse time; the node renders nothing.
+ return LoadNode()
+load = register.tag(load)
+
+#@register.tag
+def now(parser, token):
+ """
+ Displays the date, formatted according to the given string.
+
+ Uses the same format as PHP's ``date()`` function; see http://php.net/date
+ for all the possible values.
+
+ Sample usage::
+
+ It is {% now "jS F Y H:i" %}
+ """
+ # Splitting on '"' yields exactly three pieces when the single argument
+ # is a double-quoted format string.
+ bits = token.contents.split('"')
+ if len(bits) != 3:
+ raise TemplateSyntaxError("'now' statement takes one argument")
+ format_string = bits[1]
+ return NowNode(format_string)
+now = register.tag(now)
+
+#@register.tag
+def regroup(parser, token):
+ """
+ Regroups a list of alike objects by a common attribute.
+
+ This complex tag is best illustrated by use of an example: say that
+ ``people`` is a list of ``Person`` objects that have ``first_name``,
+ ``last_name``, and ``gender`` attributes, and you'd like to display a list
+ that looks like:
+
+ * Male:
+ * George Bush
+ * Bill Clinton
+ * Female:
+ * Margaret Thatcher
+ * Colendeeza Rice
+ * Unknown:
+ * Pat Smith
+
+ The following snippet of template code would accomplish this dubious task::
+
+ {% regroup people by gender as grouped %}
+ <ul>
+ {% for group in grouped %}
+ <li>{{ group.grouper }}
+ <ul>
+ {% for item in group.list %}
+ <li>{{ item }}</li>
+ {% endfor %}
+ </ul>
+ {% endfor %}
+ </ul>
+
+ As you can see, ``{% regroup %}`` populates a variable with a list of
+ objects with ``grouper`` and ``list`` attributes. ``grouper`` contains the
+ item that was grouped by; ``list`` contains the list of objects that share
+ that ``grouper``. In this case, ``grouper`` would be ``Male``, ``Female``
+ and ``Unknown``, and ``list`` is the list of people with those genders.
+
+ Note that ``{% regroup %}`` does not work when the list to be grouped is not
+ sorted by the key you are grouping by! This means that if your list of
+ people was not sorted by gender, you'd need to make sure it is sorted
+ before using it, i.e.::
+
+ {% regroup people|dictsort:"gender" by gender as grouped %}
+
+ """
+ # Expected shape: regroup <target> by <expression> as <var_name>.
+ firstbits = token.contents.split(None, 3)
+ if len(firstbits) != 4:
+ raise TemplateSyntaxError("'regroup' tag takes five arguments")
+ target = parser.compile_filter(firstbits[1])
+ if firstbits[2] != 'by':
+ raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
+ # Split the tail from the right so the expression itself may contain
+ # spaces; each piece is reversed back before use.
+ lastbits_reversed = firstbits[3][::-1].split(None, 2)
+ if lastbits_reversed[1][::-1] != 'as':
+ raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must"
+ " be 'as'")
+
+ expression = parser.compile_filter(lastbits_reversed[2][::-1])
+
+ var_name = lastbits_reversed[0][::-1]
+ return RegroupNode(target, expression, var_name)
+regroup = register.tag(regroup)
+
+def spaceless(parser, token):
+ """
+ Removes whitespace between HTML tags, including tab and newline characters.
+
+ Example usage::
+
+ {% spaceless %}
+ <p>
+ <a href="foo/">Foo</a>
+ </p>
+ {% endspaceless %}
+
+ This example would return this HTML::
+
+ <p><a href="foo/">Foo</a></p>
+
+ Only space between *tags* is normalized -- not space between tags and text.
+ In this example, the space around ``Hello`` won't be stripped::
+
+ {% spaceless %}
+ <strong>
+ Hello
+ </strong>
+ {% endspaceless %}
+ """
+ nodelist = parser.parse(('endspaceless',))
+ parser.delete_first_token()
+ return SpacelessNode(nodelist)
+spaceless = register.tag(spaceless)
+
+#@register.tag
+def templatetag(parser, token):
+ """
+ Outputs one of the bits used to compose template tags.
+
+ Since the template system has no concept of "escaping", to display one of
+ the bits used in template tags, you must use the ``{% templatetag %}`` tag.
+
+ The argument tells which template bit to output:
+
+ ================== =======
+ Argument Outputs
+ ================== =======
+ ``openblock`` ``{%``
+ ``closeblock`` ``%}``
+ ``openvariable`` ``{{``
+ ``closevariable`` ``}}``
+ ``openbrace`` ``{``
+ ``closebrace`` ``}``
+ ``opencomment`` ``{#``
+ ``closecomment`` ``#}``
+ ================== =======
+ """
+ bits = token.contents.split()
+ if len(bits) != 2:
+ raise TemplateSyntaxError("'templatetag' statement takes one argument")
+ tag = bits[1]
+ # Validate against the node's mapping so unknown keywords fail at parse
+ # time rather than rendering as empty output.
+ if tag not in TemplateTagNode.mapping:
+ raise TemplateSyntaxError("Invalid templatetag argument: '%s'."
+ " Must be one of: %s" %
+ (tag, TemplateTagNode.mapping.keys()))
+ return TemplateTagNode(tag)
+templatetag = register.tag(templatetag)
+
+def url(parser, token):
+ """
+ Returns an absolute URL matching given view with its parameters.
+
+ This is a way to define links that aren't tied to a particular URL
+ configuration::
+
+ {% url path.to.some_view arg1 arg2 %}
+
+ or
+
+ {% url path.to.some_view name1=value1 name2=value2 %}
+
+ The first argument is a path to a view. It can be an absolute python path
+ or just ``app_name.view_name`` without the project name if the view is
+ located inside the project. Other arguments are space-separated values
+ that will be filled in place of positional and keyword arguments in the
+ URL. All arguments for the URL should be present.
+
+ For example if you have a view ``app_name.client`` taking client's id and
+ the corresponding line in a URLconf looks like this::
+
+ ('^client/(\d+)/$', 'app_name.client')
+
+ and this app's URLconf is included into the project's URLconf under some
+ path::
+
+ ('^clients/', include('project_name.app_name.urls'))
+
+ then in a template you can create a link for a certain client like this::
+
+ {% url app_name.client client.id %}
+
+ The URL will look like ``/clients/client/123/``.
+ """
+ bits = token.split_contents()
+ if len(bits) < 2:
+ raise TemplateSyntaxError("'%s' takes at least one argument"
+ " (path to a view)" % bits[0])
+ viewname = bits[1]
+ args = []
+ kwargs = {}
+ asvar = None
+ bits = bits[2:]
+ if len(bits) >= 2 and bits[-2] == 'as':
+ asvar = bits[-1]
+ bits = bits[:-2]
+
+ # Backwards compatibility: check for the old comma separated format
+ # {% url urlname arg1,arg2 %}
+ # Initial check - that the first space separated bit has a comma in it
+ if bits and ',' in bits[0]:
+ check_old_format = True
+ # In order to *really* be old format, there must be a comma
+ # in *every* space separated bit, except the last.
+ for bit in bits[1:-1]:
+ if ',' not in bit:
+ # No comma in this bit. Either the comma we found
+ # in bit 1 was a false positive (e.g., comma in a string),
+ # or there is a syntax problem with missing commas
+ check_old_format = False
+ break
+ else:
+ # No comma found - must be new format.
+ check_old_format = False
+
+ if check_old_format:
+ # Confirm that this is old format by trying to parse the first
+ # argument. An exception will be raised if the comma is
+ # unexpected (i.e. outside of a static string).
+ match = kwarg_re.match(bits[0])
+ if match:
+ value = match.groups()[1]
+ try:
+ parser.compile_filter(value)
+ except TemplateSyntaxError:
+ bits = ''.join(bits).split(',')
+
+ # Now all the bits are parsed into new format,
+ # process them as template vars
+ if len(bits):
+ for bit in bits:
+ match = kwarg_re.match(bit)
+ if not match:
+ raise TemplateSyntaxError("Malformed arguments to url tag")
+ name, value = match.groups()
+ if name:
+ kwargs[name] = parser.compile_filter(value)
+ else:
+ args.append(parser.compile_filter(value))
+
+ return URLNode(viewname, args, kwargs, asvar)
+url = register.tag(url)
+
+#@register.tag
+def widthratio(parser, token):
+ """
+ For creating bar charts and such, this tag calculates the ratio of a given
+ value to a maximum value, and then applies that ratio to a constant.
+
+ For example::
+
+ <img src='bar.gif' height='10' width='{% widthratio this_value max_value 100 %}' />
+
+ Above, if ``this_value`` is 175 and ``max_value`` is 200, the image in
+ the above example will be 88 pixels wide (because 175/200 = .875;
+ .875 * 100 = 87.5 which is rounded up to 88).
+ """
+ bits = token.contents.split()
+ if len(bits) != 4:
+ raise TemplateSyntaxError("widthratio takes three arguments")
+ tag, this_value_expr, max_value_expr, max_width = bits
+
+ return WidthRatioNode(parser.compile_filter(this_value_expr),
+ parser.compile_filter(max_value_expr),
+ parser.compile_filter(max_width))
+widthratio = register.tag(widthratio)
+
+#@register.tag
+def do_with(parser, token):
+ """
+ Adds a value to the context (inside of this block) for caching and easy
+ access.
+
+ For example::
+
+ {% with person.some_sql_method as total %}
+ {{ total }} object{{ total|pluralize }}
+ {% endwith %}
+ """
+ bits = list(token.split_contents())
+ if len(bits) != 4 or bits[2] != "as":
+ raise TemplateSyntaxError("%r expected format is 'value as name'" %
+ bits[0])
+ var = parser.compile_filter(bits[1])
+ name = bits[3]
+ nodelist = parser.parse(('endwith',))
+ parser.delete_first_token()
+ return WithNode(var, name, nodelist)
+do_with = register.tag('with', do_with)
diff --git a/google/appengine/_internal/django/template/loader.py b/google/appengine/_internal/django/template/loader.py
new file mode 100644
index 0000000..7bfac7b
--- /dev/null
+++ b/google/appengine/_internal/django/template/loader.py
@@ -0,0 +1,198 @@
+# Wrapper for loading templates from storage of some sort (e.g. filesystem, database).
+#
+# This uses the TEMPLATE_LOADERS setting, which is a list of loaders to use.
+# Each loader is expected to have this interface:
+#
+# callable(name, dirs=[])
+#
+# name is the template name.
+# dirs is an optional list of directories to search instead of TEMPLATE_DIRS.
+#
+# The loader should return a tuple of (template_source, path). The path returned
+# might be shown to the user for debugging purposes, so it should identify where
+# the template was loaded from.
+#
+# A loader may return an already-compiled template instead of the actual
+# template source. In that case the path returned should be None, since the
+# path information is associated with the template during the compilation,
+# which has already been done.
+#
+# Each loader should have an "is_usable" attribute set. This is a boolean that
+# specifies whether the loader can be used in this Python installation. Each
+# loader is responsible for setting this when it's initialized.
+#
+# For example, the eggs loader (which is capable of loading templates from
+# Python eggs) sets is_usable to False if the "pkg_resources" module isn't
+# installed, because pkg_resources is necessary to read eggs.
+
+from google.appengine._internal.django.core.exceptions import ImproperlyConfigured
+from google.appengine._internal.django.template import Origin, Template, Context, TemplateDoesNotExist, add_to_builtins
+from google.appengine._internal.django.utils.importlib import import_module
+from google.appengine._internal.django.conf import settings
+
+template_source_loaders = None
+
+class BaseLoader(object):
+ is_usable = False
+
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def __call__(self, template_name, template_dirs=None):
+ return self.load_template(template_name, template_dirs)
+
+ def load_template(self, template_name, template_dirs=None):
+ source, display_name = self.load_template_source(template_name, template_dirs)
+ origin = make_origin(display_name, self.load_template_source, template_name, template_dirs)
+ try:
+ template = get_template_from_string(source, origin, template_name)
+ return template, None
+ except TemplateDoesNotExist:
+ # If compiling the template we found raises TemplateDoesNotExist, back off to
+ # returning the source and display name for the template we were asked to load.
+ # This allows for correct identification (later) of the actual template that does
+ # not exist.
+ return source, display_name
+
+ def load_template_source(self, template_name, template_dirs=None):
+ """
+ Returns a tuple containing the source and origin for the given template
+ name.
+
+ """
+ raise NotImplementedError
+
+ def reset(self):
+ """
+ Resets any state maintained by the loader instance (e.g., cached
+ templates or cached loader modules).
+
+ """
+ pass
+
+class LoaderOrigin(Origin):
+ def __init__(self, display_name, loader, name, dirs):
+ super(LoaderOrigin, self).__init__(display_name)
+ self.loader, self.loadname, self.dirs = loader, name, dirs
+
+ def reload(self):
+ return self.loader(self.loadname, self.dirs)[0]
+
+def make_origin(display_name, loader, name, dirs):
+ if settings.TEMPLATE_DEBUG and display_name:
+ return LoaderOrigin(display_name, loader, name, dirs)
+ else:
+ return None
+
+def find_template_loader(loader):
+ if isinstance(loader, (tuple, list)):
+ loader, args = loader[0], loader[1:]
+ else:
+ args = []
+ if isinstance(loader, basestring):
+ module, attr = loader.rsplit('.', 1)
+ try:
+ mod = import_module(module)
+ except ImportError, e:
+ raise ImproperlyConfigured('Error importing template source loader %s: "%s"' % (loader, e))
+ try:
+ TemplateLoader = getattr(mod, attr)
+ except AttributeError, e:
+ raise ImproperlyConfigured('Error importing template source loader %s: "%s"' % (loader, e))
+
+ if hasattr(TemplateLoader, 'load_template_source'):
+ func = TemplateLoader(*args)
+ else:
+ # Try loading module the old way - string is full path to callable
+ if args:
+ raise ImproperlyConfigured("Error importing template source loader %s - can't pass arguments to function-based loader." % loader)
+ func = TemplateLoader
+
+ if not func.is_usable:
+ import warnings
+ warnings.warn("Your TEMPLATE_LOADERS setting includes %r, but your Python installation doesn't support that type of template loading. Consider removing that line from TEMPLATE_LOADERS." % loader)
+ return None
+ else:
+ return func
+ else:
+ raise ImproperlyConfigured('Loader does not define a "load_template" callable template source loader')
+
+def find_template(name, dirs=None):
+ # Calculate template_source_loaders the first time the function is executed
+ # because putting this logic in the module-level namespace may cause
+ # circular import errors. See Django ticket #1292.
+ global template_source_loaders
+ if template_source_loaders is None:
+ loaders = []
+ for loader_name in settings.TEMPLATE_LOADERS:
+ loader = find_template_loader(loader_name)
+ if loader is not None:
+ loaders.append(loader)
+ template_source_loaders = tuple(loaders)
+ for loader in template_source_loaders:
+ try:
+ source, display_name = loader(name, dirs)
+ return (source, make_origin(display_name, loader, name, dirs))
+ except TemplateDoesNotExist:
+ pass
+ raise TemplateDoesNotExist(name)
+
+def find_template_source(name, dirs=None):
+ # For backward compatibility
+ import warnings
+ warnings.warn(
+ "`django.template.loaders.find_template_source` is deprecated; use `django.template.loaders.find_template` instead.",
+ PendingDeprecationWarning
+ )
+ template, origin = find_template(name, dirs)
+ if hasattr(template, 'render'):
+ raise Exception("Found a compiled template that is incompatible with the deprecated `django.template.loaders.find_template_source` function.")
+ return template, origin
+
+def get_template(template_name):
+ """
+ Returns a compiled Template object for the given template name,
+ handling template inheritance recursively.
+ """
+ template, origin = find_template(template_name)
+ if not hasattr(template, 'render'):
+ # template needs to be compiled
+ template = get_template_from_string(template, origin, template_name)
+ return template
+
+def get_template_from_string(source, origin=None, name=None):
+ """
+ Returns a compiled Template object for the given template code,
+ handling template inheritance recursively.
+ """
+ return Template(source, origin, name)
+
+def render_to_string(template_name, dictionary=None, context_instance=None):
+ """
+ Loads the given template_name and renders it with the given dictionary as
+ context. The template_name may be a string to load a single template using
+ get_template, or it may be a tuple to use select_template to find one of
+ the templates in the list. Returns a string.
+ """
+ dictionary = dictionary or {}
+ if isinstance(template_name, (list, tuple)):
+ t = select_template(template_name)
+ else:
+ t = get_template(template_name)
+ if context_instance:
+ context_instance.update(dictionary)
+ else:
+ context_instance = Context(dictionary)
+ return t.render(context_instance)
+
+def select_template(template_name_list):
+ "Given a list of template names, returns the first that can be loaded."
+ for template_name in template_name_list:
+ try:
+ return get_template(template_name)
+ except TemplateDoesNotExist:
+ continue
+ # If we get here, none of the templates could be loaded
+ raise TemplateDoesNotExist(', '.join(template_name_list))
+
+add_to_builtins('google.appengine._internal.django.template.loader_tags')
diff --git a/google/appengine/_internal/django/template/loader_tags.py b/google/appengine/_internal/django/template/loader_tags.py
new file mode 100644
index 0000000..37a3326
--- /dev/null
+++ b/google/appengine/_internal/django/template/loader_tags.py
@@ -0,0 +1,218 @@
+from google.appengine._internal.django.template import TemplateSyntaxError, TemplateDoesNotExist, Variable
+from google.appengine._internal.django.template import Library, Node, TextNode
+from google.appengine._internal.django.template.loader import get_template
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.utils.safestring import mark_safe
+
+register = Library()
+
+BLOCK_CONTEXT_KEY = 'block_context'
+
+class ExtendsError(Exception):
+ pass
+
+class BlockContext(object):
+ def __init__(self):
+ # Dictionary of FIFO queues.
+ self.blocks = {}
+
+ def add_blocks(self, blocks):
+ for name, block in blocks.iteritems():
+ if name in self.blocks:
+ self.blocks[name].insert(0, block)
+ else:
+ self.blocks[name] = [block]
+
+ def pop(self, name):
+ try:
+ return self.blocks[name].pop()
+ except (IndexError, KeyError):
+ return None
+
+ def push(self, name, block):
+ self.blocks[name].append(block)
+
+ def get_block(self, name):
+ try:
+ return self.blocks[name][-1]
+ except (IndexError, KeyError):
+ return None
+
+class BlockNode(Node):
+ def __init__(self, name, nodelist, parent=None):
+ self.name, self.nodelist, self.parent = name, nodelist, parent
+
+ def __repr__(self):
+ return "<Block Node: %s. Contents: %r>" % (self.name, self.nodelist)
+
+ def render(self, context):
+ block_context = context.render_context.get(BLOCK_CONTEXT_KEY)
+ context.push()
+ if block_context is None:
+ context['block'] = self
+ result = self.nodelist.render(context)
+ else:
+ push = block = block_context.pop(self.name)
+ if block is None:
+ block = self
+ # Create new block so we can store context without thread-safety issues.
+ block = BlockNode(block.name, block.nodelist)
+ block.context = context
+ context['block'] = block
+ result = block.nodelist.render(context)
+ if push is not None:
+ block_context.push(self.name, push)
+ context.pop()
+ return result
+
+ def super(self):
+ render_context = self.context.render_context
+ if (BLOCK_CONTEXT_KEY in render_context and
+ render_context[BLOCK_CONTEXT_KEY].get_block(self.name) is not None):
+ return mark_safe(self.render(self.context))
+ return ''
+
+class ExtendsNode(Node):
+ must_be_first = True
+
+ def __init__(self, nodelist, parent_name, parent_name_expr, template_dirs=None):
+ self.nodelist = nodelist
+ self.parent_name, self.parent_name_expr = parent_name, parent_name_expr
+ self.template_dirs = template_dirs
+ self.blocks = dict([(n.name, n) for n in nodelist.get_nodes_by_type(BlockNode)])
+
+ def __repr__(self):
+ if self.parent_name_expr:
+ return "<ExtendsNode: extends %s>" % self.parent_name_expr.token
+ return '<ExtendsNode: extends "%s">' % self.parent_name
+
+ def get_parent(self, context):
+ if self.parent_name_expr:
+ self.parent_name = self.parent_name_expr.resolve(context)
+ parent = self.parent_name
+ if not parent:
+ error_msg = "Invalid template name in 'extends' tag: %r." % parent
+ if self.parent_name_expr:
+ error_msg += " Got this from the '%s' variable." % self.parent_name_expr.token
+ raise TemplateSyntaxError(error_msg)
+ if hasattr(parent, 'render'):
+ return parent # parent is a Template object
+ return get_template(parent)
+
+ def render(self, context):
+ compiled_parent = self.get_parent(context)
+
+ if BLOCK_CONTEXT_KEY not in context.render_context:
+ context.render_context[BLOCK_CONTEXT_KEY] = BlockContext()
+ block_context = context.render_context[BLOCK_CONTEXT_KEY]
+
+ # Add the block nodes from this node to the block context
+ block_context.add_blocks(self.blocks)
+
+ # If this block's parent doesn't have an extends node it is the root,
+ # and its block nodes also need to be added to the block context.
+ for node in compiled_parent.nodelist:
+ # The ExtendsNode has to be the first non-text node.
+ if not isinstance(node, TextNode):
+ if not isinstance(node, ExtendsNode):
+ blocks = dict([(n.name, n) for n in
+ compiled_parent.nodelist.get_nodes_by_type(BlockNode)])
+ block_context.add_blocks(blocks)
+ break
+
+ # Call Template._render explicitly so the parser context stays
+ # the same.
+ return compiled_parent._render(context)
+
+class ConstantIncludeNode(Node):
+ def __init__(self, template_path):
+ try:
+ t = get_template(template_path)
+ self.template = t
+ except:
+ if settings.TEMPLATE_DEBUG:
+ raise
+ self.template = None
+
+ def render(self, context):
+ if self.template:
+ return self.template.render(context)
+ else:
+ return ''
+
+class IncludeNode(Node):
+ def __init__(self, template_name):
+ self.template_name = Variable(template_name)
+
+ def render(self, context):
+ try:
+ template_name = self.template_name.resolve(context)
+ t = get_template(template_name)
+ return t.render(context)
+ except:
+ if settings.TEMPLATE_DEBUG:
+ raise
+ return ''
+
+def do_block(parser, token):
+ """
+ Define a block that can be overridden by child templates.
+ """
+ bits = token.contents.split()
+ if len(bits) != 2:
+ raise TemplateSyntaxError("'%s' tag takes only one argument" % bits[0])
+ block_name = bits[1]
+ # Keep track of the names of BlockNodes found in this template, so we can
+ # check for duplication.
+ try:
+ if block_name in parser.__loaded_blocks:
+ raise TemplateSyntaxError("'%s' tag with name '%s' appears more than once" % (bits[0], block_name))
+ parser.__loaded_blocks.append(block_name)
+ except AttributeError: # parser.__loaded_blocks isn't a list yet
+ parser.__loaded_blocks = [block_name]
+ nodelist = parser.parse(('endblock', 'endblock %s' % block_name))
+ parser.delete_first_token()
+ return BlockNode(block_name, nodelist)
+
+def do_extends(parser, token):
+ """
+ Signal that this template extends a parent template.
+
+ This tag may be used in two ways: ``{% extends "base" %}`` (with quotes)
+ uses the literal value "base" as the name of the parent template to extend,
+ or ``{% extends variable %}`` uses the value of ``variable`` as either the
+ name of the parent template to extend (if it evaluates to a string) or as
+ the parent template itself (if it evaluates to a Template object).
+ """
+ bits = token.split_contents()
+ if len(bits) != 2:
+ raise TemplateSyntaxError("'%s' takes one argument" % bits[0])
+ parent_name, parent_name_expr = None, None
+ if bits[1][0] in ('"', "'") and bits[1][-1] == bits[1][0]:
+ parent_name = bits[1][1:-1]
+ else:
+ parent_name_expr = parser.compile_filter(bits[1])
+ nodelist = parser.parse()
+ if nodelist.get_nodes_by_type(ExtendsNode):
+ raise TemplateSyntaxError("'%s' cannot appear more than once in the same template" % bits[0])
+ return ExtendsNode(nodelist, parent_name, parent_name_expr)
+
+def do_include(parser, token):
+ """
+ Loads a template and renders it with the current context.
+
+ Example::
+
+ {% include "foo/some_include" %}
+ """
+ bits = token.split_contents()
+ if len(bits) != 2:
+ raise TemplateSyntaxError("%r tag takes one argument: the name of the template to be included" % bits[0])
+ path = bits[1]
+ if path[0] in ('"', "'") and path[-1] == path[0]:
+ return ConstantIncludeNode(path[1:-1])
+ return IncludeNode(bits[1])
+
+register.tag('block', do_block)
+register.tag('extends', do_extends)
+register.tag('include', do_include)
diff --git a/google/appengine/_internal/django/template/loaders/__init__.py b/google/appengine/_internal/django/template/loaders/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/google/appengine/_internal/django/template/loaders/__init__.py
diff --git a/google/appengine/_internal/django/template/loaders/app_directories.py b/google/appengine/_internal/django/template/loaders/app_directories.py
new file mode 100644
index 0000000..f8974bf
--- /dev/null
+++ b/google/appengine/_internal/django/template/loaders/app_directories.py
@@ -0,0 +1,74 @@
+"""
+Wrapper for loading templates from "templates" directories in INSTALLED_APPS
+packages.
+"""
+
+import os
+import sys
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.exceptions import ImproperlyConfigured
+from google.appengine._internal.django.template import TemplateDoesNotExist
+from google.appengine._internal.django.template.loader import BaseLoader
+from google.appengine._internal.django.utils._os import safe_join
+from google.appengine._internal.django.utils.importlib import import_module
+
+# At compile time, cache the directories to search.
+fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
+app_template_dirs = []
+for app in settings.INSTALLED_APPS:
+ try:
+ mod = import_module(app)
+ except ImportError, e:
+ raise ImproperlyConfigured('ImportError %s: %s' % (app, e.args[0]))
+ template_dir = os.path.join(os.path.dirname(mod.__file__), 'templates')
+ if os.path.isdir(template_dir):
+ app_template_dirs.append(template_dir.decode(fs_encoding))
+
+# It won't change, so convert it to a tuple to save memory.
+app_template_dirs = tuple(app_template_dirs)
+
+class Loader(BaseLoader):
+ is_usable = True
+
+ def get_template_sources(self, template_name, template_dirs=None):
+ """
+ Returns the absolute paths to "template_name", when appended to each
+ directory in "template_dirs". Any paths that don't lie inside one of the
+ template dirs are excluded from the result set, for security reasons.
+ """
+ if not template_dirs:
+ template_dirs = app_template_dirs
+ for template_dir in template_dirs:
+ try:
+ yield safe_join(template_dir, template_name)
+ except UnicodeDecodeError:
+ # The template dir name was a bytestring that wasn't valid UTF-8.
+ raise
+ except ValueError:
+ # The joined path was located outside of template_dir.
+ pass
+
+ def load_template_source(self, template_name, template_dirs=None):
+ for filepath in self.get_template_sources(template_name, template_dirs):
+ try:
+ file = open(filepath)
+ try:
+ return (file.read().decode(settings.FILE_CHARSET), filepath)
+ finally:
+ file.close()
+ except IOError:
+ pass
+ raise TemplateDoesNotExist(template_name)
+
+_loader = Loader()
+
+def load_template_source(template_name, template_dirs=None):
+ # For backwards compatibility
+ import warnings
+ warnings.warn(
+ "'django.template.loaders.app_directories.load_template_source' is deprecated; use 'django.template.loaders.app_directories.Loader' instead.",
+ PendingDeprecationWarning
+ )
+ return _loader.load_template_source(template_name, template_dirs)
+load_template_source.is_usable = True
diff --git a/google/appengine/_internal/django/template/loaders/cached.py b/google/appengine/_internal/django/template/loaders/cached.py
new file mode 100644
index 0000000..ac4e3fb
--- /dev/null
+++ b/google/appengine/_internal/django/template/loaders/cached.py
@@ -0,0 +1,59 @@
+"""
+Wrapper class that takes a list of template loaders as an argument and attempts
+to load templates from them in order, caching the result.
+"""
+
+from google.appengine._internal.django.core.exceptions import ImproperlyConfigured
+from google.appengine._internal.django.template import TemplateDoesNotExist
+from google.appengine._internal.django.template.loader import BaseLoader, get_template_from_string, find_template_loader, make_origin
+from google.appengine._internal.django.utils.hashcompat import sha_constructor
+from google.appengine._internal.django.utils.importlib import import_module
+
+class Loader(BaseLoader):
+ is_usable = True
+
+ def __init__(self, loaders):
+ self.template_cache = {}
+ self._loaders = loaders
+ self._cached_loaders = []
+
+ @property
+ def loaders(self):
+ # Resolve loaders on demand to avoid circular imports
+ if not self._cached_loaders:
+ for loader in self._loaders:
+ self._cached_loaders.append(find_template_loader(loader))
+ return self._cached_loaders
+
+ def find_template(self, name, dirs=None):
+ for loader in self.loaders:
+ try:
+ template, display_name = loader(name, dirs)
+ return (template, make_origin(display_name, loader, name, dirs))
+ except TemplateDoesNotExist:
+ pass
+ raise TemplateDoesNotExist(name)
+
+ def load_template(self, template_name, template_dirs=None):
+ key = template_name
+ if template_dirs:
+ # If template directories were specified, use a hash to differentiate
+ key = '-'.join([template_name, sha_constructor('|'.join(template_dirs)).hexdigest()])
+
+ if key not in self.template_cache:
+ template, origin = self.find_template(template_name, template_dirs)
+ if not hasattr(template, 'render'):
+ try:
+ template = get_template_from_string(template, origin, template_name)
+ except TemplateDoesNotExist:
+ # If compiling the template we found raises TemplateDoesNotExist,
+ # back off to returning the source and display name for the template
+ # we were asked to load. This allows for correct identification (later)
+ # of the actual template that does not exist.
+ return template, origin
+ self.template_cache[key] = template
+ return self.template_cache[key], None
+
+ def reset(self):
+ "Empty the template cache."
+ self.template_cache.clear()
diff --git a/google/appengine/_internal/django/template/loaders/eggs.py b/google/appengine/_internal/django/template/loaders/eggs.py
new file mode 100644
index 0000000..7c9611c
--- /dev/null
+++ b/google/appengine/_internal/django/template/loaders/eggs.py
@@ -0,0 +1,39 @@
+# Wrapper for loading templates from eggs via pkg_resources.resource_string.
+
+try:
+ from pkg_resources import resource_string
+except ImportError:
+ resource_string = None
+
+from google.appengine._internal.django.template import TemplateDoesNotExist
+from google.appengine._internal.django.template.loader import BaseLoader
+from google.appengine._internal.django.conf import settings
+
+class Loader(BaseLoader):
+ is_usable = resource_string is not None
+
+ def load_template_source(self, template_name, template_dirs=None):
+ """
+ Loads templates from Python eggs via pkg_resources.resource_string.
+
+ For every installed app, it tries to get the resource (app, template_name).
+ """
+ if resource_string is not None:
+ pkg_name = 'templates/' + template_name
+ for app in settings.INSTALLED_APPS:
+ try:
+ return (resource_string(app, pkg_name).decode(settings.FILE_CHARSET), 'egg:%s:%s' % (app, pkg_name))
+ except:
+ pass
+ raise TemplateDoesNotExist(template_name)
+
+_loader = Loader()
+
+def load_template_source(template_name, template_dirs=None):
+ import warnings
+ warnings.warn(
+ "'django.template.loaders.eggs.load_template_source' is deprecated; use 'django.template.loaders.eggs.Loader' instead.",
+ PendingDeprecationWarning
+ )
+ return _loader.load_template_source(template_name, template_dirs)
+load_template_source.is_usable = resource_string is not None
diff --git a/google/appengine/_internal/django/template/loaders/filesystem.py b/google/appengine/_internal/django/template/loaders/filesystem.py
new file mode 100644
index 0000000..519c278
--- /dev/null
+++ b/google/appengine/_internal/django/template/loaders/filesystem.py
@@ -0,0 +1,61 @@
+"""
+Wrapper for loading templates from the filesystem.
+"""
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.template import TemplateDoesNotExist
+from google.appengine._internal.django.template.loader import BaseLoader
+from google.appengine._internal.django.utils._os import safe_join
+
+class Loader(BaseLoader):
+ is_usable = True
+
+ def get_template_sources(self, template_name, template_dirs=None):
+ """
+ Returns the absolute paths to "template_name", when appended to each
+ directory in "template_dirs". Any paths that don't lie inside one of the
+ template dirs are excluded from the result set, for security reasons.
+ """
+ if not template_dirs:
+ template_dirs = settings.TEMPLATE_DIRS
+ for template_dir in template_dirs:
+ try:
+ yield safe_join(template_dir, template_name)
+ except UnicodeDecodeError:
+ # The template dir name was a bytestring that wasn't valid UTF-8.
+ raise
+ except ValueError:
+ # The joined path was located outside of this particular
+ # template_dir (it might be inside another one, so this isn't
+ # fatal).
+ pass
+
+ def load_template_source(self, template_name, template_dirs=None):
+ tried = []
+ for filepath in self.get_template_sources(template_name, template_dirs):
+ try:
+ file = open(filepath)
+ try:
+ return (file.read().decode(settings.FILE_CHARSET), filepath)
+ finally:
+ file.close()
+ except IOError:
+ tried.append(filepath)
+ if tried:
+ error_msg = "Tried %s" % tried
+ else:
+ error_msg = "Your TEMPLATE_DIRS setting is empty. Change it to point to at least one template directory."
+ raise TemplateDoesNotExist(error_msg)
+ load_template_source.is_usable = True
+
+_loader = Loader()
+
+def load_template_source(template_name, template_dirs=None):
+ # For backwards compatibility
+ import warnings
+ warnings.warn(
+ "'django.template.loaders.filesystem.load_template_source' is deprecated; use 'django.template.loaders.filesystem.Loader' instead.",
+ PendingDeprecationWarning
+ )
+ return _loader.load_template_source(template_name, template_dirs)
+load_template_source.is_usable = True
diff --git a/google/appengine/_internal/django/template/smartif.py b/google/appengine/_internal/django/template/smartif.py
new file mode 100644
index 0000000..e835e0f
--- /dev/null
+++ b/google/appengine/_internal/django/template/smartif.py
@@ -0,0 +1,206 @@
+"""
+Parser and utilities for the smart 'if' tag
+"""
+import operator
+
+# Using a simple top down parser, as described here:
+# http://effbot.org/zone/simple-top-down-parsing.htm.
+# 'led' = left denotation
+# 'nud' = null denotation
+# 'bp' = binding power (left = lbp, right = rbp)
+
+class TokenBase(object):  # Pratt-parser token: subclasses supply lbp plus nud/led as needed
+ """
+ Base class for operators and literals, mainly for debugging and for throwing
+ syntax errors.
+ """
+ id = None # node/token type name
+ value = None # used by literals
+ first = second = None # used by tree nodes
+
+ def nud(self, parser):
+ # Null denotation - called in prefix context
+ raise parser.error_class(
+ "Not expecting '%s' in this position in if tag." % self.id
+ )
+
+ def led(self, left, parser):
+ # Left denotation - called in infix context
+ raise parser.error_class(
+ "Not expecting '%s' as infix operator in if tag." % self.id
+ )
+
+ def display(self):
+ """
+ Returns what to display in error messages for this node
+ """
+ return self.id
+
+ def __repr__(self):
+ out = [str(x) for x in [self.id, self.first, self.second] if x is not None]  # s-expression style dump
+ return "(" + " ".join(out) + ")"
+
+
+def infix(bp, func):
+ """
+ Creates an infix operator, given a binding power and a function that
+ evaluates the node
+ """
+ class Operator(TokenBase):
+ lbp = bp
+
+ def led(self, left, parser):
+ self.first = left
+ self.second = parser.expression(bp)  # right operand parsed at same power -> left-associative
+ return self
+
+ def eval(self, context):
+ try:
+ return func(context, self.first, self.second)
+ except Exception:
+ # Templates shouldn't throw exceptions when rendering. We are
+ # most likely to get exceptions for things like {% if foo in bar
+ # %} where 'bar' does not support 'in', so default to False
+ return False
+
+ return Operator  # a class; IfParser.translate_token instantiates it per token
+
+
+def prefix(bp, func):
+ """
+ Creates a prefix operator, given a binding power and a function that
+ evaluates the node.
+ """
+ class Operator(TokenBase):
+ lbp = bp
+
+ def nud(self, parser):
+ self.first = parser.expression(bp)  # operand bound at this operator's power
+ self.second = None
+ return self
+
+ def eval(self, context):
+ try:
+ return func(context, self.first)
+ except Exception:
+ return False  # render failures evaluate as False, matching infix behavior
+
+ return Operator
+
+
+# Operator precedence follows Python.
+# NB - we can get slightly more accurate syntax error messages by not using the
+# same object for '==' and '='.
+# We defer variable evaluation to the lambda to ensure that terms are
+# lazily evaluated using Python's boolean parsing logic.
+OPERATORS = {
+ 'or': infix(6, lambda context, x, y: x.eval(context) or y.eval(context)),
+ 'and': infix(7, lambda context, x, y: x.eval(context) and y.eval(context)),
+ 'not': prefix(8, lambda context, x: not x.eval(context)),
+ 'in': infix(9, lambda context, x, y: x.eval(context) in y.eval(context)),
+ 'not in': infix(9, lambda context, x, y: x.eval(context) not in y.eval(context)),
+ '=': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
+ '==': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
+ '!=': infix(10, lambda context, x, y: x.eval(context) != y.eval(context)),
+ '>': infix(10, lambda context, x, y: x.eval(context) > y.eval(context)),
+ '>=': infix(10, lambda context, x, y: x.eval(context) >= y.eval(context)),
+ '<': infix(10, lambda context, x, y: x.eval(context) < y.eval(context)),
+ '<=': infix(10, lambda context, x, y: x.eval(context) <= y.eval(context)),
+}
+
+# Assign 'id' to each:
+for key, op in OPERATORS.items():
+ op.id = key  # used by error messages and __repr__
+
+
+class Literal(TokenBase):
+ """
+ A basic self-resolvable object similar to a Django template variable.
+ """
+ # IfParser uses Literal in create_var, but TemplateIfParser overrides
+ # create_var so that a proper implementation that actually resolves
+ # variables, filters etc is used.
+ id = "literal"
+ lbp = 0  # never binds as an operator
+
+ def __init__(self, value):
+ self.value = value
+
+ def display(self):
+ return repr(self.value)
+
+ def nud(self, parser):
+ return self  # a literal is a complete prefix expression on its own
+
+ def eval(self, context):
+ return self.value  # plain value; variable-resolving subclasses differ (see class comment)
+
+ def __repr__(self):
+ return "(%s %r)" % (self.id, self.value)
+
+
+class EndToken(TokenBase):
+ lbp = 0  # sentinel: zero binding power terminates expression() loops
+
+ def nud(self, parser):
+ raise parser.error_class("Unexpected end of expression in if tag.")
+
+EndToken = EndToken()  # replace the class with its singleton instance; compared with 'is'
+
+
+class IfParser(object):
+ error_class = ValueError  # subclasses may substitute a template-specific exception type
+
+ def __init__(self, tokens):
+ # pre-pass necessary to turn 'not','in' into single token
+ l = len(tokens)
+ mapped_tokens = []
+ i = 0
+ while i < l:
+ token = tokens[i]
+ if token == "not" and i + 1 < l and tokens[i+1] == "in":  # fuse the two-word operator
+ token = "not in"
+ i += 1 # skip 'in'
+ mapped_tokens.append(self.translate_token(token))
+ i += 1
+
+ self.tokens = mapped_tokens
+ self.pos = 0
+ self.current_token = self.next()  # prime the one-token lookahead
+
+ def translate_token(self, token):
+ try:
+ op = OPERATORS[token]
+ except (KeyError, TypeError):  # TypeError covers unhashable tokens; treat as a variable
+ return self.create_var(token)
+ else:
+ return op()
+
+ def next(self):
+ if self.pos >= len(self.tokens):
+ return EndToken  # shared sentinel; parse() checks identity against it
+ else:
+ retval = self.tokens[self.pos]
+ self.pos += 1
+ return retval
+
+ def parse(self):
+ retval = self.expression()
+ # Check that we have exhausted all the tokens
+ if self.current_token is not EndToken:
+ raise self.error_class("Unused '%s' at end of if expression." %
+ self.current_token.display())
+ return retval
+
+ def expression(self, rbp=0):
+ t = self.current_token
+ self.current_token = self.next()
+ left = t.nud(self)
+ while rbp < self.current_token.lbp:  # core Pratt loop: keep binding while the next operator is stronger
+ t = self.current_token
+ self.current_token = self.next()
+ left = t.led(left, self)
+ return left
+
+ def create_var(self, value):
+ return Literal(value)  # hook point: TemplateIfParser returns real template variables instead
diff --git a/google/appengine/_internal/django/templatetags/__init__.py b/google/appengine/_internal/django/templatetags/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/google/appengine/_internal/django/templatetags/__init__.py
diff --git a/google/appengine/_internal/django/templatetags/cache.py b/google/appengine/_internal/django/templatetags/cache.py
new file mode 100644
index 0000000..f6fcece
--- /dev/null
+++ b/google/appengine/_internal/django/templatetags/cache.py
@@ -0,0 +1,63 @@
+from google.appengine._internal.django.template import Library, Node, TemplateSyntaxError, Variable, VariableDoesNotExist
+from google.appengine._internal.django.template import resolve_variable
+from google.appengine._internal.django.core.cache import cache
+from google.appengine._internal.django.utils.encoding import force_unicode
+from google.appengine._internal.django.utils.http import urlquote
+from google.appengine._internal.django.utils.hashcompat import md5_constructor
+
+register = Library()
+
+class CacheNode(Node):  # renders its nodelist once per key and caches the fragment
+ def __init__(self, nodelist, expire_time_var, fragment_name, vary_on):
+ self.nodelist = nodelist
+ self.expire_time_var = Variable(expire_time_var)  # resolved per-render so it can vary by context
+ self.fragment_name = fragment_name
+ self.vary_on = vary_on
+
+ def render(self, context):
+ try:
+ expire_time = self.expire_time_var.resolve(context)
+ except VariableDoesNotExist:
+ raise TemplateSyntaxError('"cache" tag got an unknown variable: %r' % self.expire_time_var.var)
+ try:
+ expire_time = int(expire_time)
+ except (ValueError, TypeError):
+ raise TemplateSyntaxError('"cache" tag got a non-integer timeout value: %r' % expire_time)
+ # Build a unicode key for this fragment and all vary-on's.
+ args = md5_constructor(u':'.join([urlquote(resolve_variable(var, context)) for var in self.vary_on]))  # hash keeps keys short/safe
+ cache_key = 'template.cache.%s.%s' % (self.fragment_name, args.hexdigest())
+ value = cache.get(cache_key)
+ if value is None:  # cache miss: render and store
+ value = self.nodelist.render(context)
+ cache.set(cache_key, value, expire_time)
+ return value
+
+def do_cache(parser, token):
+ """
+ This will cache the contents of a template fragment for a given amount
+ of time.
+
+ Usage::
+
+ {% load cache %}
+ {% cache [expire_time] [fragment_name] %}
+ .. some expensive processing ..
+ {% endcache %}
+
+ This tag also supports varying by a list of arguments::
+
+ {% load cache %}
+ {% cache [expire_time] [fragment_name] [var1] [var2] .. %}
+ .. some expensive processing ..
+ {% endcache %}
+
+ Each unique set of arguments will result in a unique cache entry.
+ """
+ nodelist = parser.parse(('endcache',))  # everything up to {% endcache %}
+ parser.delete_first_token()  # drop the endcache token itself
+ tokens = token.contents.split()
+ if len(tokens) < 3:
+ raise TemplateSyntaxError(u"'%r' tag requires at least 2 arguments." % tokens[0])  # NOTE(review): %r already quotes, so the message double-quotes the name
+ return CacheNode(nodelist, tokens[1], tokens[2], tokens[3:])  # expire_time, fragment_name, vary-on list
+
+register.tag('cache', do_cache)
diff --git a/google/appengine/_internal/django/templatetags/i18n.py b/google/appengine/_internal/django/templatetags/i18n.py
new file mode 100644
index 0000000..7e9a7aa
--- /dev/null
+++ b/google/appengine/_internal/django/templatetags/i18n.py
@@ -0,0 +1,274 @@
+import re
+
+from google.appengine._internal.django.template import Node, Variable, VariableNode, _render_value_in_context
+from google.appengine._internal.django.template import TemplateSyntaxError, TokenParser, Library
+from google.appengine._internal.django.template import TOKEN_TEXT, TOKEN_VAR
+from google.appengine._internal.django.utils import translation
+from google.appengine._internal.django.utils.encoding import force_unicode
+
+register = Library()
+
+class GetAvailableLanguagesNode(Node):  # stores [(code, translated name)] from settings.LANGUAGES into the context
+ def __init__(self, variable):
+ self.variable = variable
+
+ def render(self, context):
+ from google.appengine._internal.django.conf import settings  # lazy import: settings read at render time
+ context[self.variable] = [(k, translation.ugettext(v)) for k, v in settings.LANGUAGES]
+ return ''
+
+class GetCurrentLanguageNode(Node):  # stores the active language code into the named context variable
+ def __init__(self, variable):
+ self.variable = variable
+
+ def render(self, context):
+ context[self.variable] = translation.get_language()
+ return ''
+
+class GetCurrentLanguageBidiNode(Node):  # stores the active language's bidi flag into the context
+ def __init__(self, variable):
+ self.variable = variable
+
+ def render(self, context):
+ context[self.variable] = translation.get_language_bidi()  # True for right-to-left layouts
+ return ''
+
+class TranslateNode(Node):  # renders {% trans %}: resolves the expression with translation on/off
+ def __init__(self, filter_expression, noop):
+ self.noop = noop
+ self.filter_expression = filter_expression
+ if isinstance(self.filter_expression.var, basestring):  # wrap a bare constant as a quoted Variable
+ self.filter_expression.var = Variable(u"'%s'" % self.filter_expression.var)
+
+ def render(self, context):
+ self.filter_expression.var.translate = not self.noop  # noop: mark for extraction but render untranslated
+ output = self.filter_expression.resolve(context)
+ return _render_value_in_context(output, context)
+
+class BlockTranslateNode(Node):  # renders {% blocktrans %}: translates a token list with %-style placeholders
+ def __init__(self, extra_context, singular, plural=None, countervar=None,
+ counter=None):
+ self.extra_context = extra_context
+ self.singular = singular
+ self.plural = plural
+ self.countervar = countervar
+ self.counter = counter
+
+ def render_token_list(self, tokens):
+ result = []
+ vars = []  # placeholder names seen, in order; NOTE: shadows the 'vars' builtin
+ for token in tokens:
+ if token.token_type == TOKEN_TEXT:
+ result.append(token.contents)
+ elif token.token_type == TOKEN_VAR:
+ result.append(u'%%(%s)s' % token.contents)  # turn {{ var }} into a %(var)s placeholder
+ vars.append(token.contents)
+ return ''.join(result), vars
+
+ def render(self, context):
+ tmp_context = {}
+ for var, val in self.extra_context.items():
+ tmp_context[var] = val.render(context)  # 'with x as y' bindings rendered eagerly
+ # Update() works like a push(), so corresponding context.pop() is at
+ # the end of function
+ context.update(tmp_context)
+ singular, vars = self.render_token_list(self.singular)
+ if self.plural and self.countervar and self.counter:
+ count = self.counter.resolve(context)
+ context[self.countervar] = count
+ plural, plural_vars = self.render_token_list(self.plural)
+ result = translation.ungettext(singular, plural, count)  # count picks singular vs plural
+ vars.extend(plural_vars)
+ else:
+ result = translation.ugettext(singular)
+ # Escape all isolated '%' before substituting in the context.
+ result = re.sub(u'%(?!\()', u'%%', result)
+ data = dict([(v, _render_value_in_context(context[v], context)) for v in vars])
+ context.pop()  # matches the context.update() push above
+ return result % data
+
+def do_get_available_languages(parser, token):
+ """
+ This will store a list of available languages
+ in the context.
+
+ Usage::
+
+ {% get_available_languages as languages %}
+ {% for language in languages %}
+ ...
+ {% endfor %}
+
+ This will just pull the LANGUAGES setting from
+ your setting file (or the default settings) and
+ put it into the named variable.
+ """
+ args = token.contents.split()
+ if len(args) != 3 or args[1] != 'as':  # expected form: get_available_languages as <var>
+ raise TemplateSyntaxError("'get_available_languages' requires 'as variable' (got %r)" % args)
+ return GetAvailableLanguagesNode(args[2])
+
+def do_get_current_language(parser, token):
+ """
+ This will store the current language in the context.
+
+ Usage::
+
+ {% get_current_language as language %}
+
+ This will fetch the currently active language and
+ put it's value into the ``language`` context
+ variable.
+ """
+ args = token.contents.split()
+ if len(args) != 3 or args[1] != 'as':  # expected form: get_current_language as <var>
+ raise TemplateSyntaxError("'get_current_language' requires 'as variable' (got %r)" % args)
+ return GetCurrentLanguageNode(args[2])
+
+def do_get_current_language_bidi(parser, token):
+ """
+ This will store the current language layout in the context.
+
+ Usage::
+
+ {% get_current_language_bidi as bidi %}
+
+ This will fetch the currently active language's layout and
+ put it's value into the ``bidi`` context variable.
+ True indicates right-to-left layout, otherwise left-to-right
+ """
+ args = token.contents.split()
+ if len(args) != 3 or args[1] != 'as':  # expected form: get_current_language_bidi as <var>
+ raise TemplateSyntaxError("'get_current_language_bidi' requires 'as variable' (got %r)" % args)
+ return GetCurrentLanguageBidiNode(args[2])
+
+def do_translate(parser, token):
+ """
+ This will mark a string for translation and will
+ translate the string for the current language.
+
+ Usage::
+
+ {% trans "this is a test" %}
+
+ This will mark the string for translation so it will
+ be pulled out by mark-messages.py into the .po files
+ and will run the string through the translation engine.
+
+ There is a second form::
+
+ {% trans "this is a test" noop %}
+
+ This will only mark for translation, but will return
+ the string unchanged. Use it when you need to store
+ values into forms that should be translated later on.
+
+ You can use variables instead of constant strings
+ to translate stuff you marked somewhere else::
+
+ {% trans variable %}
+
+ This will just try to translate the contents of
+ the variable ``variable``. Make sure that the string
+ in there is something that is in the .po file.
+ """
+ class TranslateParser(TokenParser):
+ def top(self):
+ value = self.value()
+
+ # Backwards Compatiblity fix:
+ # FilterExpression does not support single-quoted strings,
+ # so we make a cheap localized fix in order to maintain
+ # backwards compatibility with existing uses of ``trans``
+ # where single quote use is supported.
+ if value[0] == "'":
+ pos = None  # NOTE(review): unused dead assignment kept from upstream
+ m = re.match("^'([^']+)'(\|.*$)",value)  # single-quoted literal followed by filters
+ if m:
+ value = '"%s"%s' % (m.group(1).replace('"','\\"'),m.group(2))
+ elif value[-1] == "'":  # plain single-quoted literal, no filters
+ value = '"%s"' % value[1:-1].replace('"','\\"')
+
+ if self.more():
+ if self.tag() == 'noop':
+ noop = True
+ else:
+ raise TemplateSyntaxError("only option for 'trans' is 'noop'")
+ else:
+ noop = False
+ return (value, noop)
+ value, noop = TranslateParser(token.contents).top()
+ return TranslateNode(parser.compile_filter(value), noop)
+
+def do_block_translate(parser, token):
+ """
+ This will translate a block of text with parameters.
+
+ Usage::
+
+ {% blocktrans with foo|filter as bar and baz|filter as boo %}
+ This is {{ bar }} and {{ boo }}.
+ {% endblocktrans %}
+
+ Additionally, this supports pluralization::
+
+ {% blocktrans count var|length as count %}
+ There is {{ count }} object.
+ {% plural %}
+ There are {{ count }} objects.
+ {% endblocktrans %}
+
+ This is much like ngettext, only in template syntax.
+ """
+ class BlockTranslateParser(TokenParser):
+ def top(self):
+ countervar = None
+ counter = None
+ extra_context = {}
+ while self.more():
+ tag = self.tag()
+ if tag == 'with' or tag == 'and':
+ value = self.value()
+ if self.tag() != 'as':
+ raise TemplateSyntaxError("variable bindings in 'blocktrans' must be 'with value as variable'")
+ extra_context[self.tag()] = VariableNode(
+ parser.compile_filter(value))  # tag() after 'as' is the target variable name
+ elif tag == 'count':
+ counter = parser.compile_filter(self.value())
+ if self.tag() != 'as':
+ raise TemplateSyntaxError("counter specification in 'blocktrans' must be 'count value as variable'")
+ countervar = self.tag()
+ else:
+ raise TemplateSyntaxError("unknown subtag %s for 'blocktrans' found" % tag)
+ return (countervar, counter, extra_context)
+
+ countervar, counter, extra_context = BlockTranslateParser(token.contents).top()
+
+ singular = []
+ plural = []
+ while parser.tokens:  # gather the singular section up to {% plural %} or {% endblocktrans %}
+ token = parser.next_token()
+ if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
+ singular.append(token)
+ else:
+ break
+ if countervar and counter:  # pluralized form requires a {% plural %} section next
+ if token.contents.strip() != 'plural':
+ raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags inside it")
+ while parser.tokens:
+ token = parser.next_token()
+ if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
+ plural.append(token)
+ else:
+ break
+ if token.contents.strip() != 'endblocktrans':
+ raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags (seen %r) inside it" % token.contents)
+
+ return BlockTranslateNode(extra_context, singular, plural, countervar,
+ counter)
+
+register.tag('get_available_languages', do_get_available_languages)  # register all i18n tags with the library
+register.tag('get_current_language', do_get_current_language)
+register.tag('get_current_language_bidi', do_get_current_language_bidi)
+register.tag('trans', do_translate)
+register.tag('blocktrans', do_block_translate)
diff --git a/google/appengine/_internal/django/utils/__init__.py b/google/appengine/_internal/django/utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/google/appengine/_internal/django/utils/__init__.py
diff --git a/google/appengine/_internal/django/utils/_os.py b/google/appengine/_internal/django/utils/_os.py
new file mode 100644
index 0000000..e228738
--- /dev/null
+++ b/google/appengine/_internal/django/utils/_os.py
@@ -0,0 +1,45 @@
+import os
+from os.path import join, normcase, normpath, abspath, isabs, sep
+from google.appengine._internal.django.utils.encoding import force_unicode
+
+# Define our own abspath function that can handle joining
+# unicode paths to a current working directory that has non-ASCII
+# characters in it. This isn't necessary on Windows since the
+# Windows version of abspath handles this correctly. The Windows
+# abspath also handles drive letters differently than the pure
+# Python implementation, so it's best not to replace it.
+if os.name == 'nt':
+ abspathu = abspath  # Windows abspath already handles unicode cwd and drive letters
+else:
+ def abspathu(path):
+ """
+ Version of os.path.abspath that uses the unicode representation
+ of the current working directory, thus avoiding a UnicodeDecodeError
+ in join when the cwd has non-ASCII characters.
+ """
+ if not isabs(path):
+ path = join(os.getcwdu(), path)  # getcwdu() is Python 2 only: returns the cwd as unicode
+ return normpath(path)
+
+def safe_join(base, *paths):
+ """
+ Joins one or more path components to the base path component intelligently.
+ Returns a normalized, absolute version of the final path.
+
+ The final path must be located inside of the base path component (otherwise
+ a ValueError is raised).
+ """
+ # We need to use normcase to ensure we don't false-negative on case
+ # insensitive operating systems (like Windows).
+ base = force_unicode(base)  # unicode throughout so join cannot mix str/unicode
+ paths = [force_unicode(p) for p in paths]
+ final_path = normcase(abspathu(join(base, *paths)))  # absolute + case-normalized before comparing
+ base_path = normcase(abspathu(base))
+ base_path_len = len(base_path)
+ # Ensure final_path starts with base_path and that the next character after
+ # the final path is os.sep (or nothing, in which case final_path must be
+ # equal to base_path).
+ if not final_path.startswith(base_path) or final_path[base_path_len:base_path_len+1] not in ('', sep):
+ raise ValueError('the joined path is located outside of the base path'
+ ' component')
+ return final_path
diff --git a/google/appengine/_internal/django/utils/_threading_local.py b/google/appengine/_internal/django/utils/_threading_local.py
new file mode 100644
index 0000000..9225d01
--- /dev/null
+++ b/google/appengine/_internal/django/utils/_threading_local.py
@@ -0,0 +1,240 @@
+"""Thread-local objects
+
+(Note that this module provides a Python version of thread
+ threading.local class. Depending on the version of Python you're
+ using, there may be a faster one available. You should always import
+ the local class from threading.)
+
+Thread-local objects support the management of thread-local data.
+If you have data that you want to be local to a thread, simply create
+a thread-local object and use its attributes:
+
+ >>> mydata = local()
+ >>> mydata.number = 42
+ >>> mydata.number
+ 42
+
+You can also access the local-object's dictionary:
+
+ >>> mydata.__dict__
+ {'number': 42}
+ >>> mydata.__dict__.setdefault('widgets', [])
+ []
+ >>> mydata.widgets
+ []
+
+What's important about thread-local objects is that their data are
+local to a thread. If we access the data in a different thread:
+
+ >>> log = []
+ >>> def f():
+ ... items = mydata.__dict__.items()
+ ... items.sort()
+ ... log.append(items)
+ ... mydata.number = 11
+ ... log.append(mydata.number)
+
+ >>> import threading
+ >>> thread = threading.Thread(target=f)
+ >>> thread.start()
+ >>> thread.join()
+ >>> log
+ [[], 11]
+
+we get different data. Furthermore, changes made in the other thread
+don't affect data seen in this thread:
+
+ >>> mydata.number
+ 42
+
+Of course, values you get from a local object, including a __dict__
+attribute, are for whatever thread was current at the time the
+attribute was read. For that reason, you generally don't want to save
+these values across threads, as they apply only to the thread they
+came from.
+
+You can create custom local objects by subclassing the local class:
+
+ >>> class MyLocal(local):
+ ... number = 2
+ ... initialized = False
+ ... def __init__(self, **kw):
+ ... if self.initialized:
+ ... raise SystemError('__init__ called too many times')
+ ... self.initialized = True
+ ... self.__dict__.update(kw)
+ ... def squared(self):
+ ... return self.number ** 2
+
+This can be useful to support default values, methods and
+initialization. Note that if you define an __init__ method, it will be
+called each time the local object is used in a separate thread. This
+is necessary to initialize each thread's dictionary.
+
+Now if we create a local object:
+
+ >>> mydata = MyLocal(color='red')
+
+Now we have a default number:
+
+ >>> mydata.number
+ 2
+
+an initial color:
+
+ >>> mydata.color
+ 'red'
+ >>> del mydata.color
+
+And a method that operates on the data:
+
+ >>> mydata.squared()
+ 4
+
+As before, we can access the data in a separate thread:
+
+ >>> log = []
+ >>> thread = threading.Thread(target=f)
+ >>> thread.start()
+ >>> thread.join()
+ >>> log
+ [[('color', 'red'), ('initialized', True)], 11]
+
+without affecting this thread's data:
+
+ >>> mydata.number
+ 2
+ >>> mydata.color
+ Traceback (most recent call last):
+ ...
+ AttributeError: 'MyLocal' object has no attribute 'color'
+
+Note that subclasses can define slots, but they are not thread
+local. They are shared across threads:
+
+ >>> class MyLocal(local):
+ ... __slots__ = 'number'
+
+ >>> mydata = MyLocal()
+ >>> mydata.number = 42
+ >>> mydata.color = 'red'
+
+So, the separate thread:
+
+ >>> thread = threading.Thread(target=f)
+ >>> thread.start()
+ >>> thread.join()
+
+affects what we see:
+
+ >>> mydata.number
+ 11
+
+>>> del mydata
+"""
+
+# Threading import is at end
+
+class _localbase(object):  # allocation-time setup for the pure-Python threading.local
+ __slots__ = '_local__key', '_local__args', '_local__lock'
+
+ def __new__(cls, *args, **kw):
+ self = object.__new__(cls)
+ key = '_local__key', 'thread.local.' + str(id(self))  # tuple key, unique per local() instance
+ object.__setattr__(self, '_local__key', key)
+ object.__setattr__(self, '_local__args', (args, kw))
+ object.__setattr__(self, '_local__lock', RLock())  # guards __dict__ swapping in _patch
+
+ if (args or kw) and (cls.__init__ is object.__init__):
+ raise TypeError("Initialization arguments are not supported")
+
+ # We need to create the thread dict in anticipation of
+ # __init__ being called, to make sure we don't call it
+ # again ourselves.
+ dict = object.__getattribute__(self, '__dict__')  # NOTE: 'dict' shadows the builtin here
+ currentThread().__dict__[key] = dict
+
+ return self
+
+def _patch(self):  # swap in the calling thread's private __dict__ for this local object
+ key = object.__getattribute__(self, '_local__key')
+ d = currentThread().__dict__.get(key)
+ if d is None:  # first access from this thread: create its dict, then run __init__ if defined
+ d = {}
+ currentThread().__dict__[key] = d
+ object.__setattr__(self, '__dict__', d)
+
+ # we have a new instance dict, so call out __init__ if we have
+ # one
+ cls = type(self)
+ if cls.__init__ is not object.__init__:
+ args, kw = object.__getattribute__(self, '_local__args')
+ cls.__init__(self, *args, **kw)
+ else:
+ object.__setattr__(self, '__dict__', d)  # already initialized: just switch to this thread's dict
+
+class local(_localbase):  # attribute access is patched per-thread under the instance lock
+
+ def __getattribute__(self, name):
+ lock = object.__getattribute__(self, '_local__lock')  # object.* bypasses our own hooks
+ lock.acquire()
+ try:
+ _patch(self)
+ return object.__getattribute__(self, name)
+ finally:
+ lock.release()
+
+ def __setattr__(self, name, value):
+ lock = object.__getattribute__(self, '_local__lock')
+ lock.acquire()
+ try:
+ _patch(self)
+ return object.__setattr__(self, name, value)
+ finally:
+ lock.release()
+
+ def __delattr__(self, name):
+ lock = object.__getattribute__(self, '_local__lock')
+ lock.acquire()
+ try:
+ _patch(self)
+ return object.__delattr__(self, name)
+ finally:
+ lock.release()
+
+
+ def __del__():  # factory: builds the real __del__ with shutdown-safe captured globals
+ threading_enumerate = enumerate
+ __getattribute__ = object.__getattribute__
+
+ def __del__(self):
+ key = __getattribute__(self, '_local__key')
+
+ try:
+ threads = list(threading_enumerate())
+ except:
+ # if enumerate fails, as it seems to do during
+ # shutdown, we'll skip cleanup under the assumption
+ # that there is nothing to clean up
+ return
+
+ for thread in threads:
+ try:
+ __dict__ = thread.__dict__
+ except AttributeError:
+ # Thread is dying, rest in peace
+ continue
+
+ if key in __dict__:
+ try:
+ del __dict__[key]
+ except KeyError:
+ pass # didn't have anything in this thread
+
+ return __del__
+ __del__ = __del__()  # replace the factory with the produced bound-able method
+
+try:
+ from threading import currentThread, enumerate, RLock
+except ImportError:
+ from dummy_threading import currentThread, enumerate, RLock  # single-threaded fallback (Python 2 stdlib)
diff --git a/google/appengine/_internal/django/utils/autoreload.py b/google/appengine/_internal/django/utils/autoreload.py
new file mode 100644
index 0000000..51aaccd
--- /dev/null
+++ b/google/appengine/_internal/django/utils/autoreload.py
@@ -0,0 +1,119 @@
+# Autoreloading launcher.
+# Borrowed from Peter Hunt and the CherryPy project (http://www.cherrypy.org).
+# Some taken from Ian Bicking's Paste (http://pythonpaste.org/).
+#
+# Portions copyright (c) 2004, CherryPy Team (team@cherrypy.org)
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+# * Neither the name of the CherryPy Team nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os, sys, time
+
+try:
+ import thread
+except ImportError:
+ import dummy_thread as thread
+
+# This import does nothing, but it's necessary to avoid some race conditions
+# in the threading module. See http://code.djangoproject.com/ticket/2330 .
+try:
+ import threading
+except ImportError:
+ pass
+
+
+RUN_RELOADER = True
+
+_mtimes = {}
+_win = (sys.platform == "win32")
+
+def code_changed():  # True once any loaded module's source file mtime changes
+ global _mtimes, _win
+ for filename in filter(lambda v: v, map(lambda m: getattr(m, "__file__", None), sys.modules.values())):
+ if filename.endswith(".pyc") or filename.endswith(".pyo"):
+ filename = filename[:-1]  # check the .py source, not the compiled file
+ if not os.path.exists(filename):
+ continue # File might be in an egg, so it can't be reloaded.
+ stat = os.stat(filename)
+ mtime = stat.st_mtime
+ if _win:
+ mtime -= stat.st_ctime  # NOTE(review): Windows-specific adjustment relative to ctime — confirm rationale upstream
+ if filename not in _mtimes:
+ _mtimes[filename] = mtime  # first sighting: record baseline, not a change
+ continue
+ if mtime != _mtimes[filename]:
+ _mtimes = {}  # reset cache; the process will exit and rescan after restart
+ return True
+ return False
+
+def reloader_thread():  # polls for source changes once per second
+ while RUN_RELOADER:
+ if code_changed():
+ sys.exit(3) # force reload; exit code 3 tells restart_with_reloader to respawn
+ time.sleep(1)
+
+def restart_with_reloader():  # parent loop: respawn the child process while it exits with code 3
+ while True:
+ args = [sys.executable] + ['-W%s' % o for o in sys.warnoptions] + sys.argv
+ if sys.platform == "win32":
+ args = ['"%s"' % arg for arg in args]  # quote for Windows spawnve argument handling
+ new_environ = os.environ.copy()
+ new_environ["RUN_MAIN"] = 'true'  # marks the child so python_reloader runs the app
+ exit_code = os.spawnve(os.P_WAIT, sys.executable, args, new_environ)
+ if exit_code != 3:  # 3 means "code changed, restart"; anything else is final
+ return exit_code
+
+def python_reloader(main_func, args, kwargs):
+ if os.environ.get("RUN_MAIN") == "true":  # child process: run the app plus the watcher loop
+ thread.start_new_thread(main_func, args, kwargs)
+ try:
+ reloader_thread()
+ except KeyboardInterrupt:
+ pass
+ else:
+ try:
+ sys.exit(restart_with_reloader())  # parent: spawn children until a non-3 exit code
+ except KeyboardInterrupt:
+ pass
+
+def jython_reloader(main_func, args, kwargs):
+ from _systemrestart import SystemRestart  # Jython-specific restart exception
+ thread.start_new_thread(main_func, args)  # NOTE(review): kwargs is dropped here, unlike python_reloader — confirm upstream intent
+ while True:
+ if code_changed():
+ raise SystemRestart  # Jython restarts the interpreter in place
+ time.sleep(1)
+
+
+def main(main_func, args=None, kwargs=None):  # entry point: run main_func under the platform's reloader
+ if args is None:
+ args = ()
+ if kwargs is None:
+ kwargs = {}
+ if sys.platform.startswith('java'):  # Jython
+ reloader = jython_reloader
+ else:
+ reloader = python_reloader
+ reloader(main_func, args, kwargs)
+
diff --git a/google/appengine/_internal/django/utils/cache.py b/google/appengine/_internal/django/utils/cache.py
new file mode 100644
index 0000000..6629368
--- /dev/null
+++ b/google/appengine/_internal/django/utils/cache.py
@@ -0,0 +1,228 @@
+"""
+This module contains helper functions for controlling caching. It does so by
+managing the "Vary" header of responses. It includes functions to patch the
+header of response objects directly and decorators that change functions to do
+that header-patching themselves.
+
+For information on the Vary header, see:
+
+ http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.44
+
+Essentially, the "Vary" HTTP header defines which headers a cache should take
+into account when building its cache key. Requests with the same path but
+different header content for headers named in "Vary" need to get different
+cache keys to prevent delivery of wrong content.
+
+An example: i18n middleware would need to distinguish caches by the
+"Accept-language" header.
+"""
+
+import re
+import time
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.core.cache import cache
+from google.appengine._internal.django.utils.encoding import smart_str, iri_to_uri
+from google.appengine._internal.django.utils.http import http_date
+from google.appengine._internal.django.utils.hashcompat import md5_constructor
+from google.appengine._internal.django.utils.translation import get_language
+from google.appengine._internal.django.http import HttpRequest
+
+cc_delim_re = re.compile(r'\s*,\s*')
+
+def patch_cache_control(response, **kwargs):
+    """
+    This function patches the Cache-Control header by adding all
+    keyword arguments to it. The transformation is as follows:
+
+    * All keyword parameter names are turned to lowercase, and underscores
+      are converted to hyphens.
+    * If the value of a parameter is True (exactly True, not just a
+      true value), only the parameter name is added to the header.
+    * All other parameters are added with their value, after applying
+      str() to it.
+    """
+    # Parse one "token" or "token=value" element of the existing header.
+    def dictitem(s):
+        t = s.split('=', 1)
+        if len(t) > 1:
+            return (t[0].lower(), t[1])
+        else:
+            return (t[0].lower(), True)
+
+    # Serialize one (name, value) pair back into header form; True means the
+    # directive is valueless (e.g. "no-cache").
+    def dictvalue(t):
+        if t[1] is True:
+            return t[0]
+        else:
+            return t[0] + '=' + smart_str(t[1])
+
+    if response.has_header('Cache-Control'):
+        cc = cc_delim_re.split(response['Cache-Control'])
+        cc = dict([dictitem(el) for el in cc])
+    else:
+        cc = {}
+
+    # If there's already a max-age header but we're being asked to set a new
+    # max-age, use the minimum of the two ages. In practice this happens when
+    # a decorator and a piece of middleware both operate on a given view.
+    if 'max-age' in cc and 'max_age' in kwargs:
+        # NOTE(review): cc['max-age'] is a *string* parsed from the header,
+        # while kwargs['max_age'] is typically an int; on Python 2, min() of
+        # mixed types compares by type name rather than numeric value, so the
+        # int argument always wins. Upstream Django later coerces both with
+        # int() -- confirm whether the same fix is wanted here.
+        kwargs['max_age'] = min(cc['max-age'], kwargs['max_age'])
+
+    for (k, v) in kwargs.items():
+        cc[k.replace('_', '-')] = v
+    cc = ', '.join([dictvalue(el) for el in cc.items()])
+    response['Cache-Control'] = cc
+
+def get_max_age(response):
+    """
+    Returns the max-age from the response Cache-Control header as an integer
+    (or ``None`` if it wasn't found or wasn't an integer).
+    """
+    if not response.has_header('Cache-Control'):
+        return
+    cc = dict([_to_tuple(el) for el in
+        cc_delim_re.split(response['Cache-Control'])])
+    if 'max-age' in cc:
+        try:
+            return int(cc['max-age'])
+        except (ValueError, TypeError):
+            # Malformed max-age value; treat as absent.
+            pass
+
+def patch_response_headers(response, cache_timeout=None):
+    """
+    Adds some useful headers to the given HttpResponse object:
+    ETag, Last-Modified, Expires and Cache-Control
+
+    Each header is only added if it isn't already set.
+
+    cache_timeout is in seconds. The CACHE_MIDDLEWARE_SECONDS setting is used
+    by default.
+    """
+    if cache_timeout is None:
+        cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
+    if cache_timeout < 0:
+        cache_timeout = 0 # Can't have max-age negative
+    if not response.has_header('ETag'):
+        # Weak validator: MD5 of the fully-rendered body.
+        response['ETag'] = '"%s"' % md5_constructor(response.content).hexdigest()
+    if not response.has_header('Last-Modified'):
+        response['Last-Modified'] = http_date()
+    if not response.has_header('Expires'):
+        response['Expires'] = http_date(time.time() + cache_timeout)
+    # Cache-Control is always patched (max-age merging handled there).
+    patch_cache_control(response, max_age=cache_timeout)
+
+def add_never_cache_headers(response):
+    """
+    Adds headers to a response to indicate that a page should never be cached.
+    """
+    # A negative timeout is clamped to 0 by patch_response_headers, which
+    # yields max-age=0 and an already-expired Expires header.
+    patch_response_headers(response, cache_timeout=-1)
+
+def patch_vary_headers(response, newheaders):
+    """
+    Adds (or updates) the "Vary" header in the given HttpResponse object.
+    newheaders is a list of header names that should be in "Vary". Existing
+    headers in "Vary" aren't removed.
+    """
+    # Note that we need to keep the original order intact, because cache
+    # implementations may rely on the order of the Vary contents in, say,
+    # computing an MD5 hash.
+    if response.has_header('Vary'):
+        vary_headers = cc_delim_re.split(response['Vary'])
+    else:
+        vary_headers = []
+    # Use .lower() here so we treat headers as case-insensitive.
+    existing_headers = set([header.lower() for header in vary_headers])
+    additional_headers = [newheader for newheader in newheaders
+                          if newheader.lower() not in existing_headers]
+    response['Vary'] = ', '.join(vary_headers + additional_headers)
+
+def has_vary_header(response, header_query):
+    """
+    Checks to see if the response has a given header name in its Vary header.
+
+    The comparison is case-insensitive; returns a bool.
+    """
+    if not response.has_header('Vary'):
+        return False
+    vary_headers = cc_delim_re.split(response['Vary'])
+    existing_headers = set([header.lower() for header in vary_headers])
+    return header_query.lower() in existing_headers
+
+def _i18n_cache_key_suffix(request, cache_key):
+    """If enabled, returns the cache key ending with a locale."""
+    if settings.USE_I18N:
+        # first check if LocaleMiddleware or another middleware added
+        # LANGUAGE_CODE to request, then fall back to the active language
+        # which in turn can also fall back to settings.LANGUAGE_CODE
+        cache_key += '.%s' % getattr(request, 'LANGUAGE_CODE', get_language())
+    return cache_key
+
+def _generate_cache_key(request, headerlist, key_prefix):
+    """Returns a cache key from the headers given in the header list.
+
+    The key combines key_prefix, an MD5 of the request path, and an MD5 of
+    the concatenated values of every listed request header (META keys);
+    headers absent from the request are simply skipped.
+    """
+    ctx = md5_constructor()
+    for header in headerlist:
+        value = request.META.get(header, None)
+        if value is not None:
+            ctx.update(value)
+    path = md5_constructor(iri_to_uri(request.path))
+    cache_key = 'views.decorators.cache.cache_page.%s.%s.%s' % (
+        key_prefix, path.hexdigest(), ctx.hexdigest())
+    return _i18n_cache_key_suffix(request, cache_key)
+
+def _generate_cache_header_key(key_prefix, request):
+    """Returns a cache key for the header cache.
+
+    This key addresses the stored *list of Vary headers* for a path, not the
+    cached page itself (see _generate_cache_key for that).
+    """
+    path = md5_constructor(iri_to_uri(request.path))
+    cache_key = 'views.decorators.cache.cache_header.%s.%s' % (
+        key_prefix, path.hexdigest())
+    return _i18n_cache_key_suffix(request, cache_key)
+
+def get_cache_key(request, key_prefix=None):
+    """
+    Returns a cache key based on the request path. It can be used in the
+    request phase because it pulls the list of headers to take into account
+    from the global path registry and uses those to build a cache key to check
+    against.
+
+    If there is no headerlist stored, the page needs to be rebuilt, so this
+    function returns None.
+    """
+    if key_prefix is None:
+        key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
+    cache_key = _generate_cache_header_key(key_prefix, request)
+    # The registry maps the header-cache key to the list of Vary header names
+    # learned from a previous response (see learn_cache_key).
+    headerlist = cache.get(cache_key, None)
+    if headerlist is not None:
+        return _generate_cache_key(request, headerlist, key_prefix)
+    else:
+        return None
+
+def learn_cache_key(request, response, cache_timeout=None, key_prefix=None):
+    """
+    Learns what headers to take into account for some request path from the
+    response object. It stores those headers in a global path registry so that
+    later access to that path will know what headers to take into account
+    without building the response object itself. The headers are named in the
+    Vary header of the response, but we want to prevent response generation.
+
+    The list of headers to use for cache key generation is stored in the same
+    cache as the pages themselves. If the cache ages some data out of the
+    cache, this just means that we have to build the response once to get at
+    the Vary header and so at the list of headers to use for the cache key.
+    """
+    if key_prefix is None:
+        key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
+    if cache_timeout is None:
+        cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
+    cache_key = _generate_cache_header_key(key_prefix, request)
+    if response.has_header('Vary'):
+        # Translate header names ("Accept-Language") into the WSGI META form
+        # ("HTTP_ACCEPT_LANGUAGE") used by _generate_cache_key.
+        headerlist = ['HTTP_'+header.upper().replace('-', '_')
+                      for header in cc_delim_re.split(response['Vary'])]
+        cache.set(cache_key, headerlist, cache_timeout)
+        return _generate_cache_key(request, headerlist, key_prefix)
+    else:
+        # if there is no Vary header, we still need a cache key
+        # for the request.path
+        cache.set(cache_key, [], cache_timeout)
+        return _generate_cache_key(request, [], key_prefix)
+
+
+def _to_tuple(s):
+    """Split a 'name=value' header element into a (name, value) tuple.
+
+    A bare token with no '=' maps to (name, True), mirroring dictitem() in
+    patch_cache_control.
+    """
+    t = s.split('=',1)
+    if len(t) == 2:
+        return t[0].lower(), t[1]
+    return t[0].lower(), True
diff --git a/google/appengine/_internal/django/utils/checksums.py b/google/appengine/_internal/django/utils/checksums.py
new file mode 100644
index 0000000..970f563
--- /dev/null
+++ b/google/appengine/_internal/django/utils/checksums.py
@@ -0,0 +1,22 @@
+"""
+Common checksum routines (used in multiple localflavor/ cases, for example).
+"""
+
+__all__ = ['luhn',]
+
+LUHN_ODD_LOOKUP = (0, 2, 4, 6, 8, 1, 3, 5, 7, 9) # sum_of_digits(index * 2)
+
+def luhn(candidate):
+    """
+    Checks a candidate number for validity according to the Luhn
+    algorithm (used in validation of, for example, credit cards).
+    Both numeric and string candidates are accepted. Returns a bool;
+    non-digit characters make the candidate invalid (False).
+    """
+    if not isinstance(candidate, basestring):
+        candidate = str(candidate)
+    try:
+        # Digits in odd positions (from the right) are summed as-is; digits
+        # in even positions are doubled with digit-sum via LUHN_ODD_LOOKUP.
+        evens = sum([int(c) for c in candidate[-1::-2]])
+        odds = sum([LUHN_ODD_LOOKUP[int(c)] for c in candidate[-2::-2]])
+        return ((evens + odds) % 10 == 0)
+    except ValueError: # Raised if an int conversion fails
+        return False
diff --git a/google/appengine/_internal/django/utils/copycompat.py b/google/appengine/_internal/django/utils/copycompat.py
new file mode 100644
index 0000000..22b3cfb
--- /dev/null
+++ b/google/appengine/_internal/django/utils/copycompat.py
@@ -0,0 +1,14 @@
+"""
+Fixes Python 2.4's failure to deepcopy unbound functions.
+"""
+
+import copy
+import types
+
+# Monkeypatch copy's deepcopy registry to handle functions correctly.
+# Registering functions as "atomic" makes deepcopy return them unchanged
+# instead of failing (Python 2.4 could not deepcopy unbound functions).
+if (hasattr(copy, '_deepcopy_dispatch') and types.FunctionType not in copy._deepcopy_dispatch):
+    copy._deepcopy_dispatch[types.FunctionType] = copy._deepcopy_atomic
+
+# Pose as the copy module now.
+# After this, importing from this module is equivalent to importing from
+# the (patched) stdlib copy module.
+del copy, types
+from copy import *
diff --git a/google/appengine/_internal/django/utils/daemonize.py b/google/appengine/_internal/django/utils/daemonize.py
new file mode 100644
index 0000000..68e5392
--- /dev/null
+++ b/google/appengine/_internal/django/utils/daemonize.py
@@ -0,0 +1,58 @@
+import os
+import sys
+
+if os.name == 'posix':
+    def become_daemon(our_home_dir='.', out_log='/dev/null',
+                      err_log='/dev/null', umask=022):
+        "Robustly turn into a UNIX daemon, running in our_home_dir."
+        # First fork
+        try:
+            if os.fork() > 0:
+                sys.exit(0)     # kill off parent
+        except OSError, e:
+            sys.stderr.write("fork #1 failed: (%d) %s\n" % (e.errno, e.strerror))
+            sys.exit(1)
+        # Detach from the controlling terminal and change working directory.
+        os.setsid()
+        os.chdir(our_home_dir)
+        os.umask(umask)
+
+        # Second fork
+        # (prevents the daemon from ever reacquiring a controlling terminal)
+        try:
+            if os.fork() > 0:
+                os._exit(0)
+        except OSError, e:
+            sys.stderr.write("fork #2 failed: (%d) %s\n" % (e.errno, e.strerror))
+            os._exit(1)
+
+        # Redirect the standard streams: stdin from /dev/null, stdout/stderr
+        # appended (unbuffered) to the given log files.
+        si = open('/dev/null', 'r')
+        so = open(out_log, 'a+', 0)
+        se = open(err_log, 'a+', 0)
+        os.dup2(si.fileno(), sys.stdin.fileno())
+        os.dup2(so.fileno(), sys.stdout.fileno())
+        os.dup2(se.fileno(), sys.stderr.fileno())
+        # Set custom file descriptors so that they get proper buffering.
+        sys.stdout, sys.stderr = so, se
+else:
+    def become_daemon(our_home_dir='.', out_log=None, err_log=None, umask=022):
+        """
+        If we're not running under a POSIX system, just simulate the daemon
+        mode by doing redirections and directory changing.
+        """
+        os.chdir(our_home_dir)
+        os.umask(umask)
+        sys.stdin.close()
+        sys.stdout.close()
+        sys.stderr.close()
+        if err_log:
+            sys.stderr = open(err_log, 'a', 0)
+        else:
+            sys.stderr = NullDevice()
+        if out_log:
+            sys.stdout = open(out_log, 'a', 0)
+        else:
+            sys.stdout = NullDevice()
+
+    class NullDevice:
+        "A writeable object that writes to nowhere -- like /dev/null."
+        def write(self, s):
+            pass
diff --git a/google/appengine/_internal/django/utils/datastructures.py b/google/appengine/_internal/django/utils/datastructures.py
new file mode 100644
index 0000000..6fd1ae2
--- /dev/null
+++ b/google/appengine/_internal/django/utils/datastructures.py
@@ -0,0 +1,473 @@
+from types import GeneratorType
+
+from google.appengine._internal.django.utils.copycompat import deepcopy
+
+
+class MergeDict(object):
+    """
+    A simple class for creating new "virtual" dictionaries that actually look
+    up values in more than one dictionary, passed in the constructor.
+
+    If a key appears in more than one of the given dictionaries, only the
+    first occurrence will be used.
+    """
+    def __init__(self, *dicts):
+        self.dicts = dicts
+
+    def __getitem__(self, key):
+        # Return the value from the first dict that has the key.
+        for dict_ in self.dicts:
+            try:
+                return dict_[key]
+            except KeyError:
+                pass
+        raise KeyError
+
+    def __copy__(self):
+        # Shallow copy: the underlying dicts are shared, not copied.
+        return self.__class__(*self.dicts)
+
+    def get(self, key, default=None):
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    def getlist(self, key):
+        # NOTE(review): assumes each wrapped dict exposes .getlist()
+        # (i.e. is a MultiValueDict or QueryDict) -- confirm callers.
+        for dict_ in self.dicts:
+            if key in dict_.keys():
+                return dict_.getlist(key)
+        return []
+
+    def iteritems(self):
+        # Yield each key once, honoring first-dict-wins precedence.
+        seen = set()
+        for dict_ in self.dicts:
+            for item in dict_.iteritems():
+                k, v = item
+                if k in seen:
+                    continue
+                seen.add(k)
+                yield item
+
+    def iterkeys(self):
+        for k, v in self.iteritems():
+            yield k
+
+    def itervalues(self):
+        for k, v in self.iteritems():
+            yield v
+
+    def items(self):
+        return list(self.iteritems())
+
+    def keys(self):
+        return list(self.iterkeys())
+
+    def values(self):
+        return list(self.itervalues())
+
+    def has_key(self, key):
+        for dict_ in self.dicts:
+            if key in dict_:
+                return True
+        return False
+
+    __contains__ = has_key
+    __iter__ = iterkeys
+
+    def copy(self):
+        """Returns a copy of this object."""
+        return self.__copy__()
+
+class SortedDict(dict):
+    """
+    A dictionary that keeps its keys in the order in which they're inserted.
+
+    Insertion order is tracked in the auxiliary ``keyOrder`` list; every
+    mutating method keeps ``keyOrder`` in sync with the underlying dict.
+    """
+    def __new__(cls, *args, **kwargs):
+        # keyOrder is created in __new__ so it exists even when __init__ is
+        # bypassed (e.g. by copy.deepcopy or pickle).
+        instance = super(SortedDict, cls).__new__(cls, *args, **kwargs)
+        instance.keyOrder = []
+        return instance
+
+    def __init__(self, data=None):
+        if data is None:
+            data = {}
+        elif isinstance(data, GeneratorType):
+            # Unfortunately we need to be able to read a generator twice. Once
+            # to get the data into self with our super().__init__ call and a
+            # second time to setup keyOrder correctly
+            data = list(data)
+        super(SortedDict, self).__init__(data)
+        if isinstance(data, dict):
+            # A plain dict has no defined order; take whatever keys() gives.
+            self.keyOrder = data.keys()
+        else:
+            self.keyOrder = []
+            seen = set()
+            for key, value in data:
+                if key not in seen:
+                    self.keyOrder.append(key)
+                    seen.add(key)
+
+    def __deepcopy__(self, memo):
+        return self.__class__([(key, deepcopy(value, memo))
+                               for key, value in self.iteritems()])
+
+    def __setitem__(self, key, value):
+        # Only first insertion records order; overwriting keeps the old slot.
+        if key not in self:
+            self.keyOrder.append(key)
+        super(SortedDict, self).__setitem__(key, value)
+
+    def __delitem__(self, key):
+        super(SortedDict, self).__delitem__(key)
+        self.keyOrder.remove(key)
+
+    def __iter__(self):
+        return iter(self.keyOrder)
+
+    def pop(self, k, *args):
+        result = super(SortedDict, self).pop(k, *args)
+        try:
+            self.keyOrder.remove(k)
+        except ValueError:
+            # Key wasn't in the dictionary in the first place. No problem.
+            pass
+        return result
+
+    def popitem(self):
+        result = super(SortedDict, self).popitem()
+        self.keyOrder.remove(result[0])
+        return result
+
+    def items(self):
+        return zip(self.keyOrder, self.values())
+
+    def iteritems(self):
+        for key in self.keyOrder:
+            yield key, self[key]
+
+    def keys(self):
+        return self.keyOrder[:]
+
+    def iterkeys(self):
+        return iter(self.keyOrder)
+
+    def values(self):
+        return map(self.__getitem__, self.keyOrder)
+
+    def itervalues(self):
+        for key in self.keyOrder:
+            yield self[key]
+
+    def update(self, dict_):
+        # Goes through __setitem__, so new keys are appended in order.
+        for k, v in dict_.iteritems():
+            self[k] = v
+
+    def setdefault(self, key, default):
+        if key not in self:
+            self.keyOrder.append(key)
+        return super(SortedDict, self).setdefault(key, default)
+
+    def value_for_index(self, index):
+        """Returns the value of the item at the given zero-based index."""
+        return self[self.keyOrder[index]]
+
+    def insert(self, index, key, value):
+        """Inserts the key, value pair before the item with the given index."""
+        if key in self.keyOrder:
+            n = self.keyOrder.index(key)
+            del self.keyOrder[n]
+            if n < index:
+                # Removing an earlier occurrence shifts the target left by one.
+                index -= 1
+        self.keyOrder.insert(index, key)
+        super(SortedDict, self).__setitem__(key, value)
+
+    def copy(self):
+        """Returns a copy of this object."""
+        # This way of initializing the copy means it works for subclasses, too.
+        obj = self.__class__(self)
+        obj.keyOrder = self.keyOrder[:]
+        return obj
+
+    def __repr__(self):
+        """
+        Replaces the normal dict.__repr__ with a version that returns the keys
+        in their sorted order.
+        """
+        return '{%s}' % ', '.join(['%r: %r' % (k, v) for k, v in self.items()])
+
+    def clear(self):
+        super(SortedDict, self).clear()
+        self.keyOrder = []
+
+class MultiValueDictKeyError(KeyError):
+    """Raised by MultiValueDict.__getitem__ when the key is missing."""
+    pass
+
+class MultiValueDict(dict):
+    """
+    A subclass of dictionary customized to handle multiple values for the
+    same key.
+
+    >>> d = MultiValueDict({'name': ['Adrian', 'Simon'], 'position': ['Developer']})
+    >>> d['name']
+    'Simon'
+    >>> d.getlist('name')
+    ['Adrian', 'Simon']
+    >>> d.get('lastname', 'nonexistent')
+    'nonexistent'
+    >>> d.setlist('lastname', ['Holovaty', 'Willison'])
+
+    This class exists to solve the irritating problem raised by cgi.parse_qs,
+    which returns a list for every key, even though most Web forms submit
+    single name-value pairs.
+    """
+    def __init__(self, key_to_list_mapping=()):
+        # Internally every value is stored as a *list* of values.
+        super(MultiValueDict, self).__init__(key_to_list_mapping)
+
+    def __repr__(self):
+        return "<%s: %s>" % (self.__class__.__name__,
+                             super(MultiValueDict, self).__repr__())
+
+    def __getitem__(self, key):
+        """
+        Returns the last data value for this key, or [] if it's an empty list;
+        raises KeyError if not found.
+        """
+        try:
+            list_ = super(MultiValueDict, self).__getitem__(key)
+        except KeyError:
+            raise MultiValueDictKeyError("Key %r not found in %r" % (key, self))
+        try:
+            return list_[-1]
+        except IndexError:
+            return []
+
+    def __setitem__(self, key, value):
+        # A plain assignment replaces the whole list with a one-element list.
+        super(MultiValueDict, self).__setitem__(key, [value])
+
+    def __copy__(self):
+        return self.__class__(super(MultiValueDict, self).items())
+
+    def __deepcopy__(self, memo=None):
+        import google.appengine._internal.django.utils.copycompat as copy
+        if memo is None:
+            memo = {}
+        result = self.__class__()
+        memo[id(self)] = result
+        for key, value in dict.items(self):
+            dict.__setitem__(result, copy.deepcopy(key, memo),
+                             copy.deepcopy(value, memo))
+        return result
+
+    def __getstate__(self):
+        # Pickle the full value lists under '_data' so __setstate__ can
+        # restore multi-values via setlist().
+        obj_dict = self.__dict__.copy()
+        obj_dict['_data'] = dict([(k, self.getlist(k)) for k in self])
+        return obj_dict
+
+    def __setstate__(self, obj_dict):
+        data = obj_dict.pop('_data', {})
+        for k, v in data.items():
+            self.setlist(k, v)
+        self.__dict__.update(obj_dict)
+
+    def get(self, key, default=None):
+        """
+        Returns the last data value for the passed key. If key doesn't exist
+        or value is an empty list, then default is returned.
+        """
+        try:
+            val = self[key]
+        except KeyError:
+            return default
+        if val == []:
+            return default
+        return val
+
+    def getlist(self, key):
+        """
+        Returns the list of values for the passed key. If key doesn't exist,
+        then an empty list is returned.
+        """
+        try:
+            return super(MultiValueDict, self).__getitem__(key)
+        except KeyError:
+            return []
+
+    def setlist(self, key, list_):
+        super(MultiValueDict, self).__setitem__(key, list_)
+
+    def setdefault(self, key, default=None):
+        if key not in self:
+            self[key] = default
+        return self[key]
+
+    def setlistdefault(self, key, default_list=()):
+        if key not in self:
+            self.setlist(key, default_list)
+        return self.getlist(key)
+
+    def appendlist(self, key, value):
+        """Appends an item to the internal list associated with key."""
+        self.setlistdefault(key, [])
+        super(MultiValueDict, self).__setitem__(key, self.getlist(key) + [value])
+
+    def items(self):
+        """
+        Returns a list of (key, value) pairs, where value is the last item in
+        the list associated with the key.
+        """
+        return [(key, self[key]) for key in self.keys()]
+
+    def iteritems(self):
+        """
+        Yields (key, value) pairs, where value is the last item in the list
+        associated with the key.
+        """
+        for key in self.keys():
+            yield (key, self[key])
+
+    def lists(self):
+        """Returns a list of (key, list) pairs."""
+        return super(MultiValueDict, self).items()
+
+    def iterlists(self):
+        """Yields (key, list) pairs."""
+        return super(MultiValueDict, self).iteritems()
+
+    def values(self):
+        """Returns a list of the last value on every key list."""
+        return [self[key] for key in self.keys()]
+
+    def itervalues(self):
+        """Yield the last value on every key list."""
+        for key in self.iterkeys():
+            yield self[key]
+
+    def copy(self):
+        """Returns a copy of this object."""
+        # NOTE(review): this returns a *deep* copy, unlike dict.copy() which
+        # is shallow -- confirm callers expect deep-copy semantics here.
+        return self.__deepcopy__()
+
+    def update(self, *args, **kwargs):
+        """
+        update() extends rather than replaces existing key lists.
+        Also accepts keyword args.
+        """
+        if len(args) > 1:
+            raise TypeError("update expected at most 1 arguments, got %d" % len(args))
+        if args:
+            other_dict = args[0]
+            if isinstance(other_dict, MultiValueDict):
+                for key, value_list in other_dict.lists():
+                    self.setlistdefault(key, []).extend(value_list)
+            else:
+                try:
+                    for key, value in other_dict.items():
+                        self.setlistdefault(key, []).append(value)
+                except TypeError:
+                    raise ValueError("MultiValueDict.update() takes either a MultiValueDict or dictionary")
+        for key, value in kwargs.iteritems():
+            self.setlistdefault(key, []).append(value)
+
+class DotExpandedDict(dict):
+    """
+    A special dictionary constructor that takes a dictionary in which the keys
+    may contain dots to specify inner dictionaries. It's confusing, but this
+    example should make sense.
+
+    >>> d = DotExpandedDict({'person.1.firstname': ['Simon'], \
+            'person.1.lastname': ['Willison'], \
+            'person.2.firstname': ['Adrian'], \
+            'person.2.lastname': ['Holovaty']})
+    >>> d
+    {'person': {'1': {'lastname': ['Willison'], 'firstname': ['Simon']}, '2': {'lastname': ['Holovaty'], 'firstname': ['Adrian']}}}
+    >>> d['person']
+    {'1': {'lastname': ['Willison'], 'firstname': ['Simon']}, '2': {'lastname': ['Holovaty'], 'firstname': ['Adrian']}}
+    >>> d['person']['1']
+    {'lastname': ['Willison'], 'firstname': ['Simon']}
+
+    # Gotcha: Results are unpredictable if the dots are "uneven":
+    >>> DotExpandedDict({'c.1': 2, 'c.2': 3, 'c': 1})
+    {'c': 1}
+    """
+    def __init__(self, key_to_list_mapping):
+        for k, v in key_to_list_mapping.items():
+            current = self
+            bits = k.split('.')
+            # Walk/create nested dicts for every path component but the last.
+            for bit in bits[:-1]:
+                current = current.setdefault(bit, {})
+            # Now assign value to current position
+            try:
+                current[bits[-1]] = v
+            except TypeError: # Special-case if current isn't a dict.
+                # NOTE(review): rebinding the local 'current' discards this
+                # dict -- it is never attached to the parent (source of the
+                # "uneven dots" gotcha above). Confirm this is intentional.
+                current = {bits[-1]: v}
+
+class ImmutableList(tuple):
+    """
+    A tuple-like object that raises useful errors when it is asked to mutate.
+
+    Example::
+
+        >>> a = ImmutableList(range(5), warning="You cannot mutate this.")
+        >>> a[3] = '4'
+        Traceback (most recent call last):
+            ...
+        AttributeError: You cannot mutate this.
+    """
+
+    def __new__(cls, *args, **kwargs):
+        # 'warning' is popped before tuple.__new__ sees the kwargs; it may be
+        # a message string or an Exception instance to raise verbatim.
+        if 'warning' in kwargs:
+            warning = kwargs['warning']
+            del kwargs['warning']
+        else:
+            warning = 'ImmutableList object is immutable.'
+        self = tuple.__new__(cls, *args, **kwargs)
+        self.warning = warning
+        return self
+
+    def complain(self, *wargs, **kwargs):
+        # Shared handler bound to every mutating method below.
+        if isinstance(self.warning, Exception):
+            raise self.warning
+        else:
+            raise AttributeError(self.warning)
+
+    # All list mutation functions complain.
+    __delitem__ = complain
+    __delslice__ = complain
+    __iadd__ = complain
+    __imul__ = complain
+    __setitem__ = complain
+    __setslice__ = complain
+    append = complain
+    extend = complain
+    insert = complain
+    pop = complain
+    remove = complain
+    sort = complain
+    reverse = complain
+
+class DictWrapper(dict):
+    """
+    Wraps accesses to a dictionary so that certain values (those starting with
+    the specified prefix) are passed through a function before being returned.
+    The prefix is removed before looking up the real value.
+
+    Used by the SQL construction code to ensure that values are correctly
+    quoted before being used.
+    """
+    def __init__(self, data, func, prefix):
+        super(DictWrapper, self).__init__(data)
+        self.func = func
+        self.prefix = prefix
+
+    def __getitem__(self, key):
+        """
+        Retrieves the real value after stripping the prefix string (if
+        present). If the prefix is present, pass the value through self.func
+        before returning, otherwise return the raw value.
+        """
+        if key.startswith(self.prefix):
+            use_func = True
+            key = key[len(self.prefix):]
+        else:
+            use_func = False
+        value = super(DictWrapper, self).__getitem__(key)
+        if use_func:
+            return self.func(value)
+        return value
+
diff --git a/google/appengine/_internal/django/utils/dateformat.py b/google/appengine/_internal/django/utils/dateformat.py
new file mode 100644
index 0000000..b2ac6ff
--- /dev/null
+++ b/google/appengine/_internal/django/utils/dateformat.py
@@ -0,0 +1,286 @@
+"""
+PHP date() style date formatting
+See http://www.php.net/date for format strings
+
+Usage:
+>>> import datetime
+>>> d = datetime.datetime.now()
+>>> df = DateFormat(d)
+>>> print df.format('jS F Y H:i')
+7th October 2003 11:39
+>>>
+"""
+
+import re
+import time
+import calendar
+from google.appengine._internal.django.utils.dates import MONTHS, MONTHS_3, MONTHS_AP, WEEKDAYS, WEEKDAYS_ABBR
+from google.appengine._internal.django.utils.tzinfo import LocalTimezone
+from google.appengine._internal.django.utils.translation import ugettext as _
+from google.appengine._internal.django.utils.encoding import force_unicode
+
+re_formatchars = re.compile(r'(?<!\\)([aAbBcdDfFgGhHiIjlLmMnNOPrsStTUuwWyYzZ])')
+re_escaped = re.compile(r'\\(.)')
+
+class Formatter(object):
+    """Base class implementing PHP date()-style format-string expansion.
+
+    Each recognized format character maps to a same-named method on the
+    subclass; backslash-escaped characters are emitted literally.
+    """
+    def format(self, formatstr):
+        pieces = []
+        # re_formatchars.split alternates literal text (even indices) with
+        # single format characters (odd indices).
+        for i, piece in enumerate(re_formatchars.split(force_unicode(formatstr))):
+            if i % 2:
+                pieces.append(force_unicode(getattr(self, piece)()))
+            elif piece:
+                pieces.append(re_escaped.sub(r'\1', piece))
+        return u''.join(pieces)
+
+class TimeFormat(Formatter):
+    """Formatter for time-only format characters, wrapping a time/datetime."""
+    def __init__(self, t):
+        # t: any object with hour/minute/second/microsecond attributes.
+        self.data = t
+
+    def a(self):
+        "'a.m.' or 'p.m.'"
+        if self.data.hour > 11:
+            return _('p.m.')
+        return _('a.m.')
+
+    def A(self):
+        "'AM' or 'PM'"
+        if self.data.hour > 11:
+            return _('PM')
+        return _('AM')
+
+    def B(self):
+        "Swatch Internet time"
+        raise NotImplementedError
+
+    def f(self):
+        """
+        Time, in 12-hour hours and minutes, with minutes left off if they're
+        zero.
+        Examples: '1', '1:30', '2:05', '2'
+        Proprietary extension.
+        """
+        if self.data.minute == 0:
+            return self.g()
+        return u'%s:%s' % (self.g(), self.i())
+
+    def g(self):
+        "Hour, 12-hour format without leading zeros; i.e. '1' to '12'"
+        if self.data.hour == 0:
+            return 12
+        if self.data.hour > 12:
+            return self.data.hour - 12
+        return self.data.hour
+
+    def G(self):
+        "Hour, 24-hour format without leading zeros; i.e. '0' to '23'"
+        return self.data.hour
+
+    def h(self):
+        "Hour, 12-hour format; i.e. '01' to '12'"
+        return u'%02d' % self.g()
+
+    def H(self):
+        "Hour, 24-hour format; i.e. '00' to '23'"
+        return u'%02d' % self.G()
+
+    def i(self):
+        "Minutes; i.e. '00' to '59'"
+        return u'%02d' % self.data.minute
+
+    def P(self):
+        """
+        Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off
+        if they're zero and the strings 'midnight' and 'noon' if appropriate.
+        Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.'
+        Proprietary extension.
+        """
+        if self.data.minute == 0 and self.data.hour == 0:
+            return _('midnight')
+        if self.data.minute == 0 and self.data.hour == 12:
+            return _('noon')
+        return u'%s %s' % (self.f(), self.a())
+
+    def s(self):
+        "Seconds; i.e. '00' to '59'"
+        return u'%02d' % self.data.second
+
+    def u(self):
+        "Microseconds"
+        return self.data.microsecond
+
+
+class DateFormat(TimeFormat):
+    """Formatter adding the date-related format characters on top of TimeFormat."""
+    # Cumulative day counts at the start of each month (index 1-12) for a
+    # non-leap year; used by z().
+    year_days = [None, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]
+
+    def __init__(self, dt):
+        # Accepts either a datetime or date object.
+        self.data = dt
+        self.timezone = getattr(dt, 'tzinfo', None)
+        if hasattr(self.data, 'hour') and not self.timezone:
+            # Naive datetime: assume the machine's local timezone.
+            self.timezone = LocalTimezone(dt)
+
+    def b(self):
+        "Month, textual, 3 letters, lowercase; e.g. 'jan'"
+        return MONTHS_3[self.data.month]
+
+    def c(self):
+        """
+        ISO 8601 Format
+        Example : '2008-01-02T10:30:00.000123'
+        """
+        return self.data.isoformat()
+
+    def d(self):
+        "Day of the month, 2 digits with leading zeros; i.e. '01' to '31'"
+        return u'%02d' % self.data.day
+
+    def D(self):
+        "Day of the week, textual, 3 letters; e.g. 'Fri'"
+        return WEEKDAYS_ABBR[self.data.weekday()]
+
+    def F(self):
+        "Month, textual, long; e.g. 'January'"
+        return MONTHS[self.data.month]
+
+    def I(self):
+        "'1' if Daylight Savings Time, '0' otherwise."
+        if self.timezone and self.timezone.dst(self.data):
+            return u'1'
+        else:
+            return u'0'
+
+    def j(self):
+        "Day of the month without leading zeros; i.e. '1' to '31'"
+        return self.data.day
+
+    def l(self):
+        "Day of the week, textual, long; e.g. 'Friday'"
+        return WEEKDAYS[self.data.weekday()]
+
+    def L(self):
+        "Boolean for whether it is a leap year; i.e. True or False"
+        return calendar.isleap(self.data.year)
+
+    def m(self):
+        "Month; i.e. '01' to '12'"
+        return u'%02d' % self.data.month
+
+    def M(self):
+        "Month, textual, 3 letters; e.g. 'Jan'"
+        return MONTHS_3[self.data.month].title()
+
+    def n(self):
+        "Month without leading zeros; i.e. '1' to '12'"
+        return self.data.month
+
+    def N(self):
+        "Month abbreviation in Associated Press style. Proprietary extension."
+        return MONTHS_AP[self.data.month]
+
+    def O(self):
+        "Difference to Greenwich time in hours; e.g. '+0200'"
+        # NOTE(review): Python floor division on a negative offset that is not
+        # a whole number of hours (e.g. -3:30) yields surprising hour/minute
+        # components here -- confirm against such timezones.
+        seconds = self.Z()
+        return u"%+03d%02d" % (seconds // 3600, (seconds // 60) % 60)
+
+    def r(self):
+        "RFC 2822 formatted date; e.g. 'Thu, 21 Dec 2000 16:01:07 +0200'"
+        return self.format('D, j M Y H:i:s O')
+
+    def S(self):
+        "English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'"
+        if self.data.day in (11, 12, 13): # Special case
+            return u'th'
+        last = self.data.day % 10
+        if last == 1:
+            return u'st'
+        if last == 2:
+            return u'nd'
+        if last == 3:
+            return u'rd'
+        return u'th'
+
+    def t(self):
+        "Number of days in the given month; i.e. '28' to '31'"
+        return u'%02d' % calendar.monthrange(self.data.year, self.data.month)[1]
+
+    def T(self):
+        "Time zone of this machine; e.g. 'EST' or 'MDT'"
+        name = self.timezone and self.timezone.tzname(self.data) or None
+        if name is None:
+            # No symbolic name available; fall back to the numeric offset.
+            name = self.format('O')
+        return unicode(name)
+
+    def U(self):
+        "Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)"
+        if getattr(self.data, 'tzinfo', None):
+            return int(calendar.timegm(self.data.utctimetuple()))
+        else:
+            return int(time.mktime(self.data.timetuple()))
+
+    def w(self):
+        "Day of the week, numeric, i.e. '0' (Sunday) to '6' (Saturday)"
+        return (self.data.weekday() + 1) % 7
+
+    def W(self):
+        "ISO-8601 week number of year, weeks starting on Monday"
+        # Algorithm from http://www.personal.ecu.edu/mccartyr/ISOwdALG.txt
+        week_number = None
+        jan1_weekday = self.data.replace(month=1, day=1).weekday() + 1
+        weekday = self.data.weekday() + 1
+        day_of_year = self.z()
+        if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4:
+            # Date falls in the last ISO week of the previous year.
+            if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year-1)):
+                week_number = 53
+            else:
+                week_number = 52
+        else:
+            if calendar.isleap(self.data.year):
+                i = 366
+            else:
+                i = 365
+            if (i - day_of_year) < (4 - weekday):
+                # Date falls in ISO week 1 of the following year.
+                week_number = 1
+            else:
+                j = day_of_year + (7 - weekday) + (jan1_weekday - 1)
+                week_number = j // 7
+                if jan1_weekday > 4:
+                    week_number -= 1
+        return week_number
+
+    def y(self):
+        "Year, 2 digits; e.g. '99'"
+        # NOTE(review): string slicing [2:] only yields two digits for
+        # 4-digit years; years < 1000 produce 0-2 characters -- confirm.
+        return unicode(self.data.year)[2:]
+
+    def Y(self):
+        "Year, 4 digits; e.g. '1999'"
+        return self.data.year
+
+    def z(self):
+        "Day of the year; i.e. '0' to '365'"
+        doy = self.year_days[self.data.month] + self.data.day
+        if self.L() and self.data.month > 2:
+            doy += 1
+        return doy
+
+    def Z(self):
+        """
+        Time zone offset in seconds (i.e. '-43200' to '43200'). The offset for
+        timezones west of UTC is always negative, and for those east of UTC is
+        always positive.
+        """
+        if not self.timezone:
+            return 0
+        offset = self.timezone.utcoffset(self.data)
+        # Only days can be negative, so negative offsets have days=-1 and
+        # seconds positive. Positive offsets have days=0
+        return offset.days * 86400 + offset.seconds
+
+def format(value, format_string):
+    "Convenience function: format a date/datetime with DateFormat."
+    df = DateFormat(value)
+    return df.format(format_string)
+
+def time_format(value, format_string):
+    "Convenience function: format a time with TimeFormat."
+    tf = TimeFormat(value)
+    return tf.format(format_string)
diff --git a/google/appengine/_internal/django/utils/dates.py b/google/appengine/_internal/django/utils/dates.py
new file mode 100644
index 0000000..b7a0d3e
--- /dev/null
+++ b/google/appengine/_internal/django/utils/dates.py
@@ -0,0 +1,33 @@
+"Commonly-used date structures"
+
+from google.appengine._internal.django.utils.translation import ugettext_lazy as _
+
+# Weekday names keyed by datetime.weekday() numbers (Monday=0 .. Sunday=6),
+# wrapped in ugettext_lazy so translation happens at render time.
+WEEKDAYS = {
+ 0:_('Monday'), 1:_('Tuesday'), 2:_('Wednesday'), 3:_('Thursday'), 4:_('Friday'),
+ 5:_('Saturday'), 6:_('Sunday')
+}
+# Three-letter abbreviations, same keying as WEEKDAYS.
+WEEKDAYS_ABBR = {
+ 0:_('Mon'), 1:_('Tue'), 2:_('Wed'), 3:_('Thu'), 4:_('Fri'),
+ 5:_('Sat'), 6:_('Sun')
+}
+# Reverse lookup from lowercase English weekday name to weekday number.
+# Note: keys are untranslated English strings.
+WEEKDAYS_REV = {
+ 'monday':0, 'tuesday':1, 'wednesday':2, 'thursday':3, 'friday':4,
+ 'saturday':5, 'sunday':6
+}
+# Month names keyed by datetime.month numbers (January=1 .. December=12).
+MONTHS = {
+ 1:_('January'), 2:_('February'), 3:_('March'), 4:_('April'), 5:_('May'), 6:_('June'),
+ 7:_('July'), 8:_('August'), 9:_('September'), 10:_('October'), 11:_('November'),
+ 12:_('December')
+}
+# Lowercase three-letter month abbreviations, keyed like MONTHS.
+MONTHS_3 = {
+ 1:_('jan'), 2:_('feb'), 3:_('mar'), 4:_('apr'), 5:_('may'), 6:_('jun'),
+ 7:_('jul'), 8:_('aug'), 9:_('sep'), 10:_('oct'), 11:_('nov'), 12:_('dec')
+}
+# Reverse lookup from lowercase abbreviation to month number (English only).
+MONTHS_3_REV = {
+ 'jan':1, 'feb':2, 'mar':3, 'apr':4, 'may':5, 'jun':6, 'jul':7, 'aug':8,
+ 'sep':9, 'oct':10, 'nov':11, 'dec':12
+}
+MONTHS_AP = { # month names in Associated Press style
+ 1:_('Jan.'), 2:_('Feb.'), 3:_('March'), 4:_('April'), 5:_('May'), 6:_('June'), 7:_('July'),
+ 8:_('Aug.'), 9:_('Sept.'), 10:_('Oct.'), 11:_('Nov.'), 12:_('Dec.')
+}
diff --git a/google/appengine/_internal/django/utils/datetime_safe.py b/google/appengine/_internal/django/utils/datetime_safe.py
new file mode 100644
index 0000000..b634888
--- /dev/null
+++ b/google/appengine/_internal/django/utils/datetime_safe.py
@@ -0,0 +1,89 @@
+# Python's datetime strftime doesn't handle dates before 1900.
+# These classes override date and datetime to support the formatting of a date
+# through its full "proleptic Gregorian" date range.
+#
+# Based on code submitted to comp.lang.python by Andrew Dalke
+#
+# >>> datetime_safe.date(1850, 8, 2).strftime("%Y/%m/%d was a %A")
+# '1850/08/02 was a Friday'
+
+from datetime import date as real_date, datetime as real_datetime
+import re
+import time
+
+class date(real_date):
+ # Drop-in replacement for datetime.date whose strftime() also works for
+ # years before 1900 (delegates to the module-level strftime() helper).
+ def strftime(self, fmt):
+ return strftime(self, fmt)
+
class datetime(real_datetime):
    """
    Drop-in replacement for datetime.datetime whose strftime() also works
    for years before 1900 (delegates to the module-level strftime() helper).
    """

    def strftime(self, fmt):
        return strftime(self, fmt)

    def combine(self, date, time):
        """
        Return a safe datetime combining `date` and `time`.

        Bug fix: the original passed (hour, minute, microsecond, tzinfo)
        positionally into the datetime constructor, so time.second was
        silently dropped, time.microsecond landed in the `second` slot and
        time.tzinfo in the `microsecond` slot. All fields are now forwarded
        to their correct positions.
        """
        return datetime(date.year, date.month, date.day,
                        time.hour, time.minute, time.second,
                        time.microsecond, time.tzinfo)

    def date(self):
        # Return the safe date subclass so pre-1900 strftime keeps working.
        return date(self.year, self.month, self.day)
+
def new_date(d):
    "Generate a safe date from a datetime.date object."
    year, month, day = d.year, d.month, d.day
    return date(year, month, day)
+
def new_datetime(d):
    """
    Generate a safe datetime from a datetime.date or datetime.datetime object.
    """
    # A plain date only contributes year/month/day; a full datetime also
    # carries its time-of-day fields and tzinfo across.
    args = [d.year, d.month, d.day]
    if isinstance(d, real_datetime):
        args += [d.hour, d.minute, d.second, d.microsecond, d.tzinfo]
    return datetime(*args)
+
+# This library does not support strftime's "%s" or "%y" format strings.
+# Allowed if there's an even number of "%"s because they are escaped.
+# The "(%%)*" group skips any run of escaped percent signs, so only a real
+# (odd) "%" directly followed by "s" or "y" matches.
+_illegal_formatting = re.compile(r"((^|[^%])(%%)*%[sy])")
+
+def _findall(text, substr):
+ # Also finds overlaps
+ sites = []
+ i = 0
+ while 1:
+ j = text.find(substr, i)
+ if j == -1:
+ break
+ sites.append(j)
+ i=j+1
+ return sites
+
+def strftime(dt, fmt):
+ # Post-1900 dates can use the builtin strftime directly.
+ # NOTE(review): super(type(dt), dt) resolves against the runtime type, so
+ # a further subclass of the safe date/datetime classes would recurse back
+ # into this function forever; fine for the classes defined above.
+ if dt.year >= 1900:
+ return super(type(dt), dt).strftime(fmt)
+ illegal_formatting = _illegal_formatting.search(fmt)
+ if illegal_formatting:
+ # NOTE(review): message is missing a space before the format code
+ # ("...does not handle%y").
+ raise TypeError("strftime of dates before 1900 does not handle" + illegal_formatting.group(0))
+
+ year = dt.year
+ # For every non-leap year century, advance by
+ # 6 years to get into the 28-year repeat cycle
+ delta = 2000 - year
+ off = 6 * (delta // 100 + delta // 400)
+ year = year + off
+
+ # Move to around the year 2000
+ # (the Gregorian calendar's weekday/leap-year pattern repeats every 28
+ # years within a century, so this surrogate year shares dt's calendar).
+ year = year + ((2000 - year) // 28) * 28
+ timetuple = dt.timetuple()
+ s1 = time.strftime(fmt, (year,) + timetuple[1:])
+ sites1 = _findall(s1, str(year))
+
+ # Format again with a second equivalent year; only positions where both
+ # outputs show their respective year are genuine %Y expansions (and not
+ # digits that happen to coincide with the year).
+ s2 = time.strftime(fmt, (year+28,) + timetuple[1:])
+ sites2 = _findall(s2, str(year+28))
+
+ sites = []
+ for site in sites1:
+ if site in sites2:
+ sites.append(site)
+
+ # Splice the real (pre-1900) year back into the surrogate output.
+ s = s1
+ syear = "%04d" % (dt.year,)
+ for site in sites:
+ s = s[:site] + syear + s[site+4:]
+ return s
diff --git a/google/appengine/_internal/django/utils/decorators.py b/google/appengine/_internal/django/utils/decorators.py
new file mode 100644
index 0000000..d18fec4
--- /dev/null
+++ b/google/appengine/_internal/django/utils/decorators.py
@@ -0,0 +1,90 @@
+"Functions that help with dynamically creating decorators for views."
+
+import types
+try:
+ from functools import wraps, update_wrapper, WRAPPER_ASSIGNMENTS
+except ImportError:
+ from google.appengine._internal.django.utils.functional import wraps, update_wrapper, WRAPPER_ASSIGNMENTS # Python 2.4 fallback.
+
+
def method_decorator(decorator):
    """
    Converts a function decorator into a method decorator
    """
    def _dec(func):
        def _wrapper(self, *call_args, **call_kwargs):
            def bound_func(*inner_args, **inner_kwargs):
                return func(self, *inner_args, **inner_kwargs)
            # bound_func has the signature that 'decorator' expects i.e. no
            # 'self' argument, but it is a closure over self so it can call
            # 'func' correctly.
            decorated = decorator(bound_func)
            return decorated(*call_args, **call_kwargs)
        return wraps(func)(_wrapper)
    update_wrapper(_dec, decorator)
    # Change the name to aid debugging.
    _dec.__name__ = 'method_decorator(%s)' % decorator.__name__
    return _dec
+
+
+def decorator_from_middleware_with_args(middleware_class):
+ """
+ Like decorator_from_middleware, but returns a function
+ that accepts the arguments to be passed to the middleware_class.
+ Use like::
+
+ cache_page = decorator_from_middleware_with_args(CacheMiddleware)
+ # ...
+
+ @cache_page(3600)
+ def my_view(request):
+ # ...
+ """
+ # Instantiation of middleware_class is deferred until the returned
+ # factory is called with the middleware's constructor arguments.
+ return make_middleware_decorator(middleware_class)
+
+
+def decorator_from_middleware(middleware_class):
+ """
+ Given a middleware class (not an instance), returns a view decorator. This
+ lets you use middleware functionality on a per-view basis. The middleware
+ is created with no params passed.
+ """
+ # Note the trailing call: the middleware is instantiated immediately with
+ # no arguments, unlike decorator_from_middleware_with_args.
+ return make_middleware_decorator(middleware_class)()
+
+
def available_attrs(fn):
    """
    Return the list of functools-wrappable attributes on a callable.
    This is required as a workaround for http://bugs.python.org/issue3445.
    """
    present = []
    for attr in WRAPPER_ASSIGNMENTS:
        if hasattr(fn, attr):
            present.append(attr)
    return tuple(present)
+
+
+def make_middleware_decorator(middleware_class):
+ # Factory of factories: _make_decorator instantiates the middleware with
+ # the caller's arguments; _decorator then wraps a view so the middleware's
+ # hooks run around it in the standard order:
+ # process_request -> process_view -> view -> process_exception (on error)
+ # -> process_response. Each hook is optional and may short-circuit by
+ # returning a non-None response.
+ def _make_decorator(*m_args, **m_kwargs):
+ middleware = middleware_class(*m_args, **m_kwargs)
+ def _decorator(view_func):
+ def _wrapped_view(request, *args, **kwargs):
+ if hasattr(middleware, 'process_request'):
+ result = middleware.process_request(request)
+ if result is not None:
+ return result
+ if hasattr(middleware, 'process_view'):
+ result = middleware.process_view(request, view_func, args, kwargs)
+ if result is not None:
+ return result
+ try:
+ response = view_func(request, *args, **kwargs)
+ except Exception, e:
+ # process_exception may convert the exception into a response;
+ # otherwise re-raise it unchanged.
+ if hasattr(middleware, 'process_exception'):
+ result = middleware.process_exception(request, e)
+ if result is not None:
+ return result
+ raise
+ if hasattr(middleware, 'process_response'):
+ result = middleware.process_response(request, response)
+ if result is not None:
+ return result
+ return response
+ # available_attrs guards against missing attributes on the view
+ # (workaround for http://bugs.python.org/issue3445).
+ return wraps(view_func, assigned=available_attrs(view_func))(_wrapped_view)
+ return _decorator
+ return _make_decorator
diff --git a/google/appengine/_internal/django/utils/encoding.py b/google/appengine/_internal/django/utils/encoding.py
new file mode 100644
index 0000000..670c533
--- /dev/null
+++ b/google/appengine/_internal/django/utils/encoding.py
@@ -0,0 +1,180 @@
+import types
+import urllib
+import locale
+import datetime
+import codecs
+from decimal import Decimal
+
+from google.appengine._internal.django.utils.functional import Promise
+
class DjangoUnicodeDecodeError(UnicodeDecodeError):
    """
    UnicodeDecodeError that also records the object whose conversion
    failed, so the error message can report what was passed in.
    """

    def __init__(self, obj, *args):
        self.obj = obj
        UnicodeDecodeError.__init__(self, *args)

    def __str__(self):
        base_message = UnicodeDecodeError.__str__(self)
        return '%s. You passed in %r (%s)' % (base_message, self.obj,
                                              type(self.obj))
+
+class StrAndUnicode(object):
+ """
+ A class whose __str__ returns its __unicode__ as a UTF-8 bytestring.
+
+ Useful as a mix-in.
+ """
+ # Relies on subclasses defining __unicode__ (a Python 2 protocol); no
+ # fallback is provided, so __str__ raises AttributeError without it.
+ def __str__(self):
+ return self.__unicode__().encode('utf-8')
+
+def smart_unicode(s, encoding='utf-8', strings_only=False, errors='strict'):
+ """
+ Returns a unicode object representing 's'. Treats bytestrings using the
+ 'encoding' codec.
+
+ If strings_only is True, don't convert (some) non-string-like objects.
+ """
+ if isinstance(s, Promise):
+ # The input is the result of a gettext_lazy() call.
+ # Returned untouched so the translation stays lazy; force_unicode
+ # would resolve it to a concrete string.
+ return s
+ return force_unicode(s, encoding, strings_only, errors)
+
+def is_protected_type(obj):
+ """Determine if the object instance is of a protected type.
+
+ Objects of protected types are preserved as-is when passed to
+ force_unicode(strings_only=True).
+ """
+ # types.NoneType and long are Python 2 names; the set covers None,
+ # integers, the datetime family, and floats/Decimals.
+ return isinstance(obj, (
+ types.NoneType,
+ int, long,
+ datetime.datetime, datetime.date, datetime.time,
+ float, Decimal)
+ )
+
+def force_unicode(s, encoding='utf-8', strings_only=False, errors='strict'):
+ """
+ Similar to smart_unicode, except that lazy instances are resolved to
+ strings, rather than kept as lazy objects.
+
+ If strings_only is True, don't convert (some) non-string-like objects.
+ """
+ if strings_only and is_protected_type(s):
+ return s
+ try:
+ if not isinstance(s, basestring,):
+ # Non-string object: prefer its __unicode__, else go through
+ # str() and decode the resulting bytestring.
+ if hasattr(s, '__unicode__'):
+ s = unicode(s)
+ else:
+ try:
+ s = unicode(str(s), encoding, errors)
+ except UnicodeEncodeError:
+ if not isinstance(s, Exception):
+ raise
+ # If we get to here, the caller has passed in an Exception
+ # subclass populated with non-ASCII data without special
+ # handling to display as a string. We need to handle this
+ # without raising a further exception. We do an
+ # approximation to what the Exception's standard str()
+ # output should be.
+ s = ' '.join([force_unicode(arg, encoding, strings_only,
+ errors) for arg in s])
+ elif not isinstance(s, unicode):
+ # Note: We use .decode() here, instead of unicode(s, encoding,
+ # errors), so that if s is a SafeString, it ends up being a
+ # SafeUnicode at the end.
+ s = s.decode(encoding, errors)
+ except UnicodeDecodeError, e:
+ if not isinstance(s, Exception):
+ raise DjangoUnicodeDecodeError(s, *e.args)
+ else:
+ # If we get to here, the caller has passed in an Exception
+ # subclass populated with non-ASCII bytestring data without a
+ # working unicode method. Try to handle this without raising a
+ # further exception by individually forcing the exception args
+ # to unicode.
+ s = ' '.join([force_unicode(arg, encoding, strings_only,
+ errors) for arg in s])
+ return s
+
+def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
+ """
+ Returns a bytestring version of 's', encoded as specified in 'encoding'.
+
+ If strings_only is True, don't convert (some) non-string-like objects.
+ """
+ # Unlike is_protected_type(), only None and int are protected here.
+ if strings_only and isinstance(s, (types.NoneType, int)):
+ return s
+ if isinstance(s, Promise):
+ # Lazy translation: resolve to unicode, then encode.
+ return unicode(s).encode(encoding, errors)
+ elif not isinstance(s, basestring):
+ try:
+ return str(s)
+ except UnicodeEncodeError:
+ if isinstance(s, Exception):
+ # An Exception subclass containing non-ASCII data that doesn't
+ # know how to print itself properly. We shouldn't raise a
+ # further exception.
+ return ' '.join([smart_str(arg, encoding, strings_only,
+ errors) for arg in s])
+ return unicode(s).encode(encoding, errors)
+ elif isinstance(s, unicode):
+ return s.encode(encoding, errors)
+ elif s and encoding != 'utf-8':
+ # A non-empty bytestring targeted at another codec: assumed to be
+ # UTF-8 on input, re-encoded to the requested encoding.
+ return s.decode('utf-8', errors).encode(encoding, errors)
+ else:
+ return s
+
+def iri_to_uri(iri):
+ """
+ Convert an Internationalized Resource Identifier (IRI) portion to a URI
+ portion that is suitable for inclusion in a URL.
+
+ This is the algorithm from section 3.1 of RFC 3987. However, since we are
+ assuming input is either UTF-8 or unicode already, we can simplify things a
+ little from the full method.
+
+ Returns an ASCII string containing the encoded result. None is passed
+ through unchanged.
+ """
+ # The list of safe characters here is constructed from the "reserved" and
+ # "unreserved" characters specified in sections 2.2 and 2.3 of RFC 3986:
+ # reserved = gen-delims / sub-delims
+ # gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
+ # sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
+ # / "*" / "+" / "," / ";" / "="
+ # unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
+ # Of the unreserved characters, urllib.quote already considers all but
+ # the ~ safe.
+ # The % character is also added to the list of safe characters here, as the
+ # end of section 3.1 of RFC 3987 specifically mentions that % must not be
+ # converted.
+ if iri is None:
+ return iri
+ return urllib.quote(smart_str(iri), safe="/#%[]=:;$&()+,!?*@'~")
+
+def filepath_to_uri(path):
+ """Convert an file system path to a URI portion that is suitable for
+ inclusion in a URL.
+
+ We are assuming input is either UTF-8 or unicode already.
+
+ This method will encode certain chars that would normally be recognized as
+ special chars for URIs. Note that this method does not encode the '
+ character, as it is a valid character within URIs. See
+ encodeURIComponent() JavaScript function for more details.
+
+ Returns an ASCII string containing the encoded result. None is passed
+ through unchanged.
+ """
+ if path is None:
+ return path
+ # I know about `os.sep` and `os.altsep` but I want to leave
+ # some flexibility for hardcoding separators.
+ # Backslashes are normalized to forward slashes before quoting.
+ return urllib.quote(smart_str(path).replace("\\", "/"), safe="/~!*()'")
+
# The encoding of the default system locale but falls back to the
# given fallback encoding if the encoding is unsupported by python or could
# not be determined. See tickets #10335 and #5846
try:
    DEFAULT_LOCALE_ENCODING = locale.getdefaultlocale()[1] or 'ascii'
    codecs.lookup(DEFAULT_LOCALE_ENCODING)
except Exception:
    # locale.getdefaultlocale() can raise ValueError for unrecognized
    # locales and codecs.lookup() raises LookupError for unsupported
    # encodings. The original bare "except:" also swallowed SystemExit and
    # KeyboardInterrupt; catching Exception keeps the ASCII fallback while
    # letting those propagate.
    DEFAULT_LOCALE_ENCODING = 'ascii'
diff --git a/google/appengine/_internal/django/utils/feedgenerator.py b/google/appengine/_internal/django/utils/feedgenerator.py
new file mode 100644
index 0000000..d84a9e0
--- /dev/null
+++ b/google/appengine/_internal/django/utils/feedgenerator.py
@@ -0,0 +1,372 @@
+"""
+Syndication feed generation library -- used for generating RSS, etc.
+
+Sample usage:
+
+>>> from google.appengine._internal.django.utils import feedgenerator
+>>> feed = feedgenerator.Rss201rev2Feed(
+... title=u"Poynter E-Media Tidbits",
+... link=u"http://www.poynter.org/column.asp?id=31",
+... description=u"A group Weblog by the sharpest minds in online media/journalism/publishing.",
+... language=u"en",
+... )
+>>> feed.add_item(
+... title="Hello",
+... link=u"http://www.holovaty.com/test/",
+... description="Testing."
+... )
+>>> fp = open('test.rss', 'w')
+>>> feed.write(fp, 'utf-8')
+>>> fp.close()
+
+For definitions of the different versions of RSS, see:
+http://diveintomark.org/archives/2004/02/04/incompatible-rss
+"""
+
+import datetime
+import urlparse
+from google.appengine._internal.django.utils.xmlutils import SimplerXMLGenerator
+from google.appengine._internal.django.utils.encoding import force_unicode, iri_to_uri
+
def rfc2822_date(date):
    # We do this ourselves to be timezone aware, email.Utils is not tz aware.
    # Naive datetimes get the '-0000' suffix; aware ones get their real offset.
    if not date.tzinfo:
        return date.strftime('%a, %d %b %Y %H:%M:%S -0000')
    time_str = date.strftime('%a, %d %b %Y %H:%M:%S ')
    offset = date.tzinfo.utcoffset(date)
    timezone = (offset.days * 24 * 60) + (offset.seconds / 60)
    hour, minute = divmod(timezone, 60)
    return time_str + "%+03d%02d" % (hour, minute)
+
def rfc3339_date(date):
    # Naive datetimes are rendered with the 'Z' (UTC) marker; aware ones get
    # an explicit +HH:MM / -HH:MM numeric offset.
    if not date.tzinfo:
        return date.strftime('%Y-%m-%dT%H:%M:%SZ')
    time_str = date.strftime('%Y-%m-%dT%H:%M:%S')
    offset = date.tzinfo.utcoffset(date)
    timezone = (offset.days * 24 * 60) + (offset.seconds / 60)
    hour, minute = divmod(timezone, 60)
    return time_str + "%+03d:%02d" % (hour, minute)
+
+def get_tag_uri(url, date):
+ """
+ Creates a TagURI.
+
+ See http://diveintomark.org/archives/2004/05/28/howto-atom-id
+
+ The result has the form tag:host[,YYYY-MM-DD]:path/fragment; the date
+ part is omitted when date is None.
+ """
+ url_split = urlparse.urlparse(url)
+
+ # Python 2.4 didn't have named attributes on split results or the hostname.
+ hostname = getattr(url_split, 'hostname', url_split[1].split(':')[0])
+ path = url_split[2]
+ fragment = url_split[5]
+
+ d = ''
+ if date is not None:
+ d = ',%s' % date.strftime('%Y-%m-%d')
+ return u'tag:%s%s:%s/%s' % (hostname, d, path, fragment)
+
+class SyndicationFeed(object):
+ "Base class for all syndication feeds. Subclasses should provide write()"
+ # Feed-level metadata is normalized into the self.feed dict; each entry
+ # added via add_item() becomes a dict in self.items. Serialization is
+ # left entirely to subclasses.
+ def __init__(self, title, link, description, language=None, author_email=None,
+ author_name=None, author_link=None, subtitle=None, categories=None,
+ feed_url=None, feed_copyright=None, feed_guid=None, ttl=None, **kwargs):
+ to_unicode = lambda s: force_unicode(s, strings_only=True)
+ if categories:
+ categories = [force_unicode(c) for c in categories]
+ if ttl is not None:
+ # Force ints to unicode
+ ttl = force_unicode(ttl)
+ self.feed = {
+ 'title': to_unicode(title),
+ 'link': iri_to_uri(link),
+ 'description': to_unicode(description),
+ 'language': to_unicode(language),
+ 'author_email': to_unicode(author_email),
+ 'author_name': to_unicode(author_name),
+ 'author_link': iri_to_uri(author_link),
+ 'subtitle': to_unicode(subtitle),
+ 'categories': categories or (),
+ 'feed_url': iri_to_uri(feed_url),
+ 'feed_copyright': to_unicode(feed_copyright),
+ 'id': feed_guid or link,
+ 'ttl': ttl,
+ }
+ # Extra keyword arguments are stored verbatim for custom subclasses.
+ self.feed.update(kwargs)
+ self.items = []
+
+ def add_item(self, title, link, description, author_email=None,
+ author_name=None, author_link=None, pubdate=None, comments=None,
+ unique_id=None, enclosure=None, categories=(), item_copyright=None,
+ ttl=None, **kwargs):
+ """
+ Adds an item to the feed. All args are expected to be Python Unicode
+ objects except pubdate, which is a datetime.datetime object, and
+ enclosure, which is an instance of the Enclosure class.
+ """
+ to_unicode = lambda s: force_unicode(s, strings_only=True)
+ if categories:
+ categories = [to_unicode(c) for c in categories]
+ if ttl is not None:
+ # Force ints to unicode
+ ttl = force_unicode(ttl)
+ item = {
+ 'title': to_unicode(title),
+ 'link': iri_to_uri(link),
+ 'description': to_unicode(description),
+ 'author_email': to_unicode(author_email),
+ 'author_name': to_unicode(author_name),
+ 'author_link': iri_to_uri(author_link),
+ 'pubdate': pubdate,
+ 'comments': to_unicode(comments),
+ 'unique_id': to_unicode(unique_id),
+ 'enclosure': enclosure,
+ 'categories': categories or (),
+ 'item_copyright': to_unicode(item_copyright),
+ 'ttl': ttl,
+ }
+ # Extra keyword arguments are stored verbatim for custom subclasses.
+ item.update(kwargs)
+ self.items.append(item)
+
+ def num_items(self):
+ return len(self.items)
+
+ def root_attributes(self):
+ """
+ Return extra attributes to place on the root (i.e. feed/channel) element.
+ Called from write().
+ """
+ return {}
+
+ def add_root_elements(self, handler):
+ """
+ Add elements in the root (i.e. feed/channel) element. Called
+ from write().
+ """
+ pass
+
+ def item_attributes(self, item):
+ """
+ Return extra attributes to place on each item (i.e. item/entry) element.
+ """
+ return {}
+
+ def add_item_elements(self, handler, item):
+ """
+ Add elements on each item (i.e. item/entry) element.
+ """
+ pass
+
+ def write(self, outfile, encoding):
+ """
+ Outputs the feed in the given encoding to outfile, which is a file-like
+ object. Subclasses should override this.
+ """
+ raise NotImplementedError
+
+ def writeString(self, encoding):
+ """
+ Returns the feed in the given encoding as a string.
+ """
+ from StringIO import StringIO
+ s = StringIO()
+ self.write(s, encoding)
+ return s.getvalue()
+
+ def latest_post_date(self):
+ """
+ Returns the latest item's pubdate. If none of them have a pubdate,
+ this returns the current date/time.
+ """
+ updates = [i['pubdate'] for i in self.items if i['pubdate'] is not None]
+ if len(updates) > 0:
+ updates.sort()
+ return updates[-1]
+ else:
+ return datetime.datetime.now()
+
+class Enclosure(object):
+ "Represents an RSS enclosure"
+ def __init__(self, url, length, mime_type):
+ "All args are expected to be Python Unicode objects"
+ # length and mime_type are stored as given; only the url is IRI-encoded.
+ self.length, self.mime_type = length, mime_type
+ self.url = iri_to_uri(url)
+
+class RssFeed(SyndicationFeed):
+ # Common RSS serialization; concrete subclasses define _version and
+ # add_item_elements() for their RSS dialect.
+ mime_type = 'application/rss+xml'
+ def write(self, outfile, encoding):
+ handler = SimplerXMLGenerator(outfile, encoding)
+ handler.startDocument()
+ handler.startElement(u"rss", self.rss_attributes())
+ handler.startElement(u"channel", self.root_attributes())
+ self.add_root_elements(handler)
+ self.write_items(handler)
+ self.endChannelElement(handler)
+ handler.endElement(u"rss")
+
+ def rss_attributes(self):
+ return {u"version": self._version,
+ u"xmlns:atom": u"http://www.w3.org/2005/Atom"}
+
+ def write_items(self, handler):
+ for item in self.items:
+ handler.startElement(u'item', self.item_attributes(item))
+ self.add_item_elements(handler, item)
+ handler.endElement(u"item")
+
+ def add_root_elements(self, handler):
+ handler.addQuickElement(u"title", self.feed['title'])
+ handler.addQuickElement(u"link", self.feed['link'])
+ handler.addQuickElement(u"description", self.feed['description'])
+ # atom:link rel="self" points back at the feed URL itself.
+ handler.addQuickElement(u"atom:link", None, {u"rel": u"self", u"href": self.feed['feed_url']})
+ if self.feed['language'] is not None:
+ handler.addQuickElement(u"language", self.feed['language'])
+ for cat in self.feed['categories']:
+ handler.addQuickElement(u"category", cat)
+ if self.feed['feed_copyright'] is not None:
+ handler.addQuickElement(u"copyright", self.feed['feed_copyright'])
+ handler.addQuickElement(u"lastBuildDate", rfc2822_date(self.latest_post_date()).decode('utf-8'))
+ if self.feed['ttl'] is not None:
+ handler.addQuickElement(u"ttl", self.feed['ttl'])
+
+ def endChannelElement(self, handler):
+ handler.endElement(u"channel")
+
+class RssUserland091Feed(RssFeed):
+ # RSS 0.91 items carry only title, link and an optional description.
+ _version = u"0.91"
+ def add_item_elements(self, handler, item):
+ handler.addQuickElement(u"title", item['title'])
+ handler.addQuickElement(u"link", item['link'])
+ if item['description'] is not None:
+ handler.addQuickElement(u"description", item['description'])
+
+class Rss201rev2Feed(RssFeed):
+ # Spec: http://blogs.law.harvard.edu/tech/rss
+ _version = u"2.0"
+ def add_item_elements(self, handler, item):
+ handler.addQuickElement(u"title", item['title'])
+ handler.addQuickElement(u"link", item['link'])
+ if item['description'] is not None:
+ handler.addQuickElement(u"description", item['description'])
+
+ # Author information.
+ # RSS's <author> requires an email address; a name-only author is
+ # emitted as <dc:creator> from the Dublin Core namespace instead.
+ if item["author_name"] and item["author_email"]:
+ handler.addQuickElement(u"author", "%s (%s)" % (item['author_email'], item['author_name']))
+ elif item["author_email"]:
+ handler.addQuickElement(u"author", item["author_email"])
+ elif item["author_name"]:
+ handler.addQuickElement(u"dc:creator", item["author_name"], {u"xmlns:dc": u"http://purl.org/dc/elements/1.1/"})
+
+ if item['pubdate'] is not None:
+ handler.addQuickElement(u"pubDate", rfc2822_date(item['pubdate']).decode('utf-8'))
+ if item['comments'] is not None:
+ handler.addQuickElement(u"comments", item['comments'])
+ if item['unique_id'] is not None:
+ handler.addQuickElement(u"guid", item['unique_id'])
+ if item['ttl'] is not None:
+ handler.addQuickElement(u"ttl", item['ttl'])
+
+ # Enclosure.
+ if item['enclosure'] is not None:
+ handler.addQuickElement(u"enclosure", '',
+ {u"url": item['enclosure'].url, u"length": item['enclosure'].length,
+ u"type": item['enclosure'].mime_type})
+
+ # Categories.
+ for cat in item['categories']:
+ handler.addQuickElement(u"category", cat)
+
+class Atom1Feed(SyndicationFeed):
+ # Spec: http://atompub.org/2005/07/11/draft-ietf-atompub-format-10.html
+ mime_type = 'application/atom+xml'
+ ns = u"http://www.w3.org/2005/Atom"
+
+ def write(self, outfile, encoding):
+ handler = SimplerXMLGenerator(outfile, encoding)
+ handler.startDocument()
+ handler.startElement(u'feed', self.root_attributes())
+ self.add_root_elements(handler)
+ self.write_items(handler)
+ handler.endElement(u"feed")
+
+ def root_attributes(self):
+ if self.feed['language'] is not None:
+ return {u"xmlns": self.ns, u"xml:lang": self.feed['language']}
+ else:
+ return {u"xmlns": self.ns}
+
+ def add_root_elements(self, handler):
+ handler.addQuickElement(u"title", self.feed['title'])
+ handler.addQuickElement(u"link", "", {u"rel": u"alternate", u"href": self.feed['link']})
+ if self.feed['feed_url'] is not None:
+ handler.addQuickElement(u"link", "", {u"rel": u"self", u"href": self.feed['feed_url']})
+ handler.addQuickElement(u"id", self.feed['id'])
+ handler.addQuickElement(u"updated", rfc3339_date(self.latest_post_date()).decode('utf-8'))
+ if self.feed['author_name'] is not None:
+ handler.startElement(u"author", {})
+ handler.addQuickElement(u"name", self.feed['author_name'])
+ if self.feed['author_email'] is not None:
+ handler.addQuickElement(u"email", self.feed['author_email'])
+ if self.feed['author_link'] is not None:
+ handler.addQuickElement(u"uri", self.feed['author_link'])
+ handler.endElement(u"author")
+ if self.feed['subtitle'] is not None:
+ handler.addQuickElement(u"subtitle", self.feed['subtitle'])
+ for cat in self.feed['categories']:
+ handler.addQuickElement(u"category", "", {u"term": cat})
+ if self.feed['feed_copyright'] is not None:
+ handler.addQuickElement(u"rights", self.feed['feed_copyright'])
+
+ def write_items(self, handler):
+ for item in self.items:
+ handler.startElement(u"entry", self.item_attributes(item))
+ self.add_item_elements(handler, item)
+ handler.endElement(u"entry")
+
+ def add_item_elements(self, handler, item):
+ handler.addQuickElement(u"title", item['title'])
+ handler.addQuickElement(u"link", u"", {u"href": item['link'], u"rel": u"alternate"})
+ if item['pubdate'] is not None:
+ handler.addQuickElement(u"updated", rfc3339_date(item['pubdate']).decode('utf-8'))
+
+ # Author information.
+ if item['author_name'] is not None:
+ handler.startElement(u"author", {})
+ handler.addQuickElement(u"name", item['author_name'])
+ if item['author_email'] is not None:
+ handler.addQuickElement(u"email", item['author_email'])
+ if item['author_link'] is not None:
+ handler.addQuickElement(u"uri", item['author_link'])
+ handler.endElement(u"author")
+
+ # Unique ID.
+ # Falls back to a tag: URI derived from the link and pubdate when no
+ # explicit unique_id was supplied.
+ if item['unique_id'] is not None:
+ unique_id = item['unique_id']
+ else:
+ unique_id = get_tag_uri(item['link'], item['pubdate'])
+ handler.addQuickElement(u"id", unique_id)
+
+ # Summary.
+ if item['description'] is not None:
+ handler.addQuickElement(u"summary", item['description'], {u"type": u"html"})
+
+ # Enclosure.
+ if item['enclosure'] is not None:
+ handler.addQuickElement(u"link", '',
+ {u"rel": u"enclosure",
+ u"href": item['enclosure'].url,
+ u"length": item['enclosure'].length,
+ u"type": item['enclosure'].mime_type})
+
+ # Categories.
+ for cat in item['categories']:
+ handler.addQuickElement(u"category", u"", {u"term": cat})
+
+ # Rights.
+ if item['item_copyright'] is not None:
+ handler.addQuickElement(u"rights", item['item_copyright'])
+
+# This isolates the decision of what the system default is, so calling code can
+# do "feedgenerator.DefaultFeed" instead of "feedgenerator.Rss201rev2Feed".
+# RSS 2.0 is the chosen default serialization format.
+DefaultFeed = Rss201rev2Feed
diff --git a/google/appengine/_internal/django/utils/formats.py b/google/appengine/_internal/django/utils/formats.py
new file mode 100644
index 0000000..6473633
--- /dev/null
+++ b/google/appengine/_internal/django/utils/formats.py
@@ -0,0 +1,159 @@
+import decimal
+import datetime
+
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.utils.translation import get_language, to_locale, check_for_language
+from google.appengine._internal.django.utils.importlib import import_module
+from google.appengine._internal.django.utils.encoding import smart_str
+from google.appengine._internal.django.utils import dateformat, numberformat, datetime_safe
+from google.appengine._internal.django.utils.safestring import mark_safe
+
+# format_cache is a mapping from (format_type, lang) to the format string.
+# By using the cache, it is possible to avoid running get_format_modules
+# repeatedly.
+_format_cache = {}
+_format_modules_cache = {}
+
+def reset_format_cache():
+ """Clear any cached formats.
+
+ This method is provided primarily for testing purposes,
+ so that the effects of cached formats can be removed.
+ """
+ global _format_cache, _format_modules_cache
+ # Rebinds the module-level dicts rather than mutating them in place, so
+ # any previously handed-out references keep their old contents.
+ _format_cache = {}
+ _format_modules_cache = {}
+
+def iter_format_modules(lang):
+ """
+ Does the heavy lifting of finding format modules.
+
+ Yields imported ".formats" modules for the given language, trying the
+ project's FORMAT_MODULE_PATH (if set) before the built-in locale path,
+ and the full locale ("de_DE") before the bare language ("de").
+ """
+ if check_for_language(lang):
+ # NOTE(review): this hardcodes the upstream 'django.conf.locale' path,
+ # while the rest of this vendored copy lives under
+ # google.appengine._internal.django -- confirm the upstream name is
+ # importable here, otherwise these imports silently fail below.
+ format_locations = ['django.conf.locale.%s']
+ if settings.FORMAT_MODULE_PATH:
+ format_locations.append(settings.FORMAT_MODULE_PATH + '.%s')
+ format_locations.reverse()
+ locale = to_locale(lang)
+ locales = [locale]
+ if '_' in locale:
+ locales.append(locale.split('_')[0])
+ for location in format_locations:
+ for loc in locales:
+ try:
+ yield import_module('.formats', location % loc)
+ except ImportError:
+ # Missing format modules are expected; skip them silently.
+ pass
+
+def get_format_modules(reverse=False):
+ """
+ Returns a list of the format modules found
+ """
+ # Memoized per active language in _format_modules_cache.
+ lang = get_language()
+ modules = _format_modules_cache.setdefault(lang, list(iter_format_modules(lang)))
+ if reverse:
+ return list(reversed(modules))
+ return modules
+
+def get_format(format_type):
+ """
+ For a specific format type, returns the format for the current
+ language (locale), defaults to the format in the settings.
+ format_type is the name of the format, e.g. 'DATE_FORMAT'
+ """
+ format_type = smart_str(format_type)
+ if settings.USE_L10N:
+ cache_key = (format_type, get_language())
+ try:
+ # A cached None (negative result) falls through to the settings
+ # default via the "or".
+ return _format_cache[cache_key] or getattr(settings, format_type)
+ except KeyError:
+ for module in get_format_modules():
+ try:
+ val = getattr(module, format_type)
+ _format_cache[cache_key] = val
+ return val
+ except AttributeError:
+ pass
+ # Remember that no module defines this format for this language.
+ _format_cache[cache_key] = None
+ # With USE_L10N off (or nothing found), use the project settings value.
+ return getattr(settings, format_type)
+
+def date_format(value, format=None):
+ """
+ Formats a datetime.date or datetime.datetime object using a
+ localizable format
+
+ `format` is a format-type name (e.g. 'DATE_FORMAT'); defaults to
+ 'DATE_FORMAT'.
+ """
+ return dateformat.format(value, get_format(format or 'DATE_FORMAT'))
+
+def time_format(value, format=None):
+ """
+ Formats a datetime.time object using a localizable format
+
+ `format` is a format-type name; defaults to 'TIME_FORMAT'.
+ """
+ return dateformat.time_format(value, get_format(format or 'TIME_FORMAT'))
+
+def number_format(value, decimal_pos=None):
+ """
+ Formats a numeric value using localization settings
+
+ Separator and grouping parameters are looked up per-locale via
+ get_format().
+ """
+ return numberformat.format(
+ value,
+ get_format('DECIMAL_SEPARATOR'),
+ decimal_pos,
+ get_format('NUMBER_GROUPING'),
+ get_format('THOUSAND_SEPARATOR'),
+ )
+
+def localize(value):
+ """
+ Checks if value is a localizable type (date, number...) and returns it
+ formatted as a string using current locale format
+
+ Non-localizable values are returned unchanged.
+ """
+ # bool must be tested before the numeric branch: bool is an int subclass
+ # and would otherwise be rendered as '0'/'1'.
+ if isinstance(value, bool):
+ return mark_safe(unicode(value))
+ elif isinstance(value, (decimal.Decimal, float, int, long)):
+ return number_format(value)
+ # datetime must be tested before date: datetime is a date subclass.
+ elif isinstance(value, datetime.datetime):
+ return date_format(value, 'DATETIME_FORMAT')
+ elif isinstance(value, datetime.date):
+ return date_format(value)
+ elif isinstance(value, datetime.time):
+ return time_format(value, 'TIME_FORMAT')
+ else:
+ return value
+
+def localize_input(value, default=None):
+ """
+ Checks if an input value is a localizable type and returns it
+ formatted with the appropriate formatting string of the current locale.
+
+ `default` overrides the locale's first *_INPUT_FORMATS entry.
+ Non-localizable values are returned unchanged.
+ """
+ if isinstance(value, (decimal.Decimal, float, int, long)):
+ return number_format(value)
+ # datetime is checked before date because datetime subclasses date; the
+ # datetime_safe wrappers keep strftime working for pre-1900 years.
+ if isinstance(value, datetime.datetime):
+ value = datetime_safe.new_datetime(value)
+ format = smart_str(default or get_format('DATETIME_INPUT_FORMATS')[0])
+ return value.strftime(format)
+ elif isinstance(value, datetime.date):
+ value = datetime_safe.new_date(value)
+ format = smart_str(default or get_format('DATE_INPUT_FORMATS')[0])
+ return value.strftime(format)
+ elif isinstance(value, datetime.time):
+ format = smart_str(default or get_format('TIME_INPUT_FORMATS')[0])
+ return value.strftime(format)
+ return value
+
+def sanitize_separators(value):
+ """
+ Sanitizes a value according to the current decimal and
+ thousand separator setting. Used with form field input.
+
+ Converts a locale-formatted numeric string (e.g. "1.234,56") back into
+ a machine-parseable one ("1234.56"). Only applies when USE_L10N is on;
+ other values pass through unchanged.
+ """
+ if settings.USE_L10N:
+ decimal_separator = get_format('DECIMAL_SEPARATOR')
+ if isinstance(value, basestring):
+ # parts collects [decimals, integer-part] and is joined reversed,
+ # producing "integer.decimals".
+ parts = []
+ if decimal_separator in value:
+ value, decimals = value.split(decimal_separator, 1)
+ parts.append(decimals)
+ if settings.USE_THOUSAND_SEPARATOR:
+ parts.append(value.replace(get_format('THOUSAND_SEPARATOR'), ''))
+ else:
+ parts.append(value)
+ value = '.'.join(reversed(parts))
+ return value
diff --git a/google/appengine/_internal/django/utils/functional.py b/google/appengine/_internal/django/utils/functional.py
new file mode 100644
index 0000000..99739f0
--- /dev/null
+++ b/google/appengine/_internal/django/utils/functional.py
@@ -0,0 +1,367 @@
+# License for code in this file that was taken from Python 2.5.
+
+# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+# --------------------------------------------
+#
+# 1. This LICENSE AGREEMENT is between the Python Software Foundation
+# ("PSF"), and the Individual or Organization ("Licensee") accessing and
+# otherwise using this software ("Python") in source or binary form and
+# its associated documentation.
+#
+# 2. Subject to the terms and conditions of this License Agreement, PSF
+# hereby grants Licensee a nonexclusive, royalty-free, world-wide
+# license to reproduce, analyze, test, perform and/or display publicly,
+# prepare derivative works, distribute, and otherwise use Python
+# alone or in any derivative version, provided, however, that PSF's
+# License Agreement and PSF's notice of copyright, i.e., "Copyright (c)
+# 2001, 2002, 2003, 2004, 2005, 2006, 2007 Python Software Foundation;
+# All Rights Reserved" are retained in Python alone or in any derivative
+# version prepared by Licensee.
+#
+# 3. In the event Licensee prepares a derivative work that is based on
+# or incorporates Python or any part thereof, and wants to make
+# the derivative work available to others as provided herein, then
+# Licensee hereby agrees to include in any such work a brief summary of
+# the changes made to Python.
+#
+# 4. PSF is making Python available to Licensee on an "AS IS"
+# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+# INFRINGE ANY THIRD PARTY RIGHTS.
+#
+# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+#
+# 6. This License Agreement will automatically terminate upon a material
+# breach of its terms and conditions.
+#
+# 7. Nothing in this License Agreement shall be deemed to create any
+# relationship of agency, partnership, or joint venture between PSF and
+# Licensee. This License Agreement does not grant permission to use PSF
+# trademarks or trade name in a trademark sense to endorse or promote
+# products or services of Licensee, or any third party.
+#
+# 8. By copying, installing or otherwise using Python, Licensee
+# agrees to be bound by the terms and conditions of this License
+# Agreement.
+
+
+def curry(_curried_func, *args, **kwargs):
+ def _curried(*moreargs, **morekwargs):
+ return _curried_func(*(args+moreargs), **dict(kwargs, **morekwargs))
+ return _curried
+
+### Begin from Python 2.5 functools.py ########################################
+
+# Summary of changes made to the Python 2.5 code below:
+# * swapped ``partial`` for ``curry`` to maintain backwards-compatibility
+# in Django.
+
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007 Python Software Foundation.
+# All Rights Reserved.
+
+###############################################################################
+
+# update_wrapper() and wraps() are tools to help write
+# wrapper functions that can handle naive introspection
+
+WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__doc__')
+WRAPPER_UPDATES = ('__dict__',)
+def update_wrapper(wrapper,
+ wrapped,
+ assigned = WRAPPER_ASSIGNMENTS,
+ updated = WRAPPER_UPDATES):
+ """Update a wrapper function to look like the wrapped function
+
+ wrapper is the function to be updated
+ wrapped is the original function
+ assigned is a tuple naming the attributes assigned directly
+ from the wrapped function to the wrapper function (defaults to
+ functools.WRAPPER_ASSIGNMENTS)
+ updated is a tuple naming the attributes off the wrapper that
+ are updated with the corresponding attribute from the wrapped
+ function (defaults to functools.WRAPPER_UPDATES)
+ """
+ for attr in assigned:
+ setattr(wrapper, attr, getattr(wrapped, attr))
+ for attr in updated:
+ getattr(wrapper, attr).update(getattr(wrapped, attr))
+ # Return the wrapper so this can be used as a decorator via curry()
+ return wrapper
+
+def wraps(wrapped,
+ assigned = WRAPPER_ASSIGNMENTS,
+ updated = WRAPPER_UPDATES):
+ """Decorator factory to apply update_wrapper() to a wrapper function
+
+ Returns a decorator that invokes update_wrapper() with the decorated
+ function as the wrapper argument and the arguments to wraps() as the
+ remaining arguments. Default arguments are as for update_wrapper().
+ This is a convenience function to simplify applying curry() to
+ update_wrapper().
+ """
+ return curry(update_wrapper, wrapped=wrapped,
+ assigned=assigned, updated=updated)
+
+### End from Python 2.5 functools.py ##########################################
+
+def memoize(func, cache, num_args):
+ """
+ Wrap a function so that results for any argument tuple are stored in
+ 'cache'. Note that the args to the function must be usable as dictionary
+ keys.
+
+ Only the first num_args are considered when creating the key.
+ """
+ def wrapper(*args):
+ mem_args = args[:num_args]
+ if mem_args in cache:
+ return cache[mem_args]
+ result = func(*args)
+ cache[mem_args] = result
+ return result
+ return wraps(func)(wrapper)
+
+class Promise(object):
+ """
+ This is just a base class for the proxy class created in
+ the closure of the lazy function. It can be used to recognize
+ promises in code.
+ """
+ pass
+
+def lazy(func, *resultclasses):
+ """
+ Turns any callable into a lazy evaluated callable. You need to give result
+ classes or types -- at least one is needed so that the automatic forcing of
+ the lazy evaluation code is triggered. Results are not memoized; the
+ function is evaluated on every access.
+ """
+
+ class __proxy__(Promise):
+ """
+ Encapsulate a function call and act as a proxy for methods that are
+ called on the result of that function. The function is not evaluated
+ until one of the methods on the result is called.
+ """
+ __dispatch = None
+
+ def __init__(self, args, kw):
+ self.__func = func
+ self.__args = args
+ self.__kw = kw
+ if self.__dispatch is None:
+ self.__prepare_class__()
+
+ def __reduce__(self):
+ return (
+ _lazy_proxy_unpickle,
+ (self.__func, self.__args, self.__kw) + resultclasses
+ )
+
+ def __prepare_class__(cls):
+ cls.__dispatch = {}
+ for resultclass in resultclasses:
+ cls.__dispatch[resultclass] = {}
+ for (k, v) in resultclass.__dict__.items():
+ # All __promise__ return the same wrapper method, but they
+ # also do setup, inserting the method into the dispatch
+ # dict.
+ meth = cls.__promise__(resultclass, k, v)
+ if hasattr(cls, k):
+ continue
+ setattr(cls, k, meth)
+ cls._delegate_str = str in resultclasses
+ cls._delegate_unicode = unicode in resultclasses
+ assert not (cls._delegate_str and cls._delegate_unicode), "Cannot call lazy() with both str and unicode return types."
+ if cls._delegate_unicode:
+ cls.__unicode__ = cls.__unicode_cast
+ elif cls._delegate_str:
+ cls.__str__ = cls.__str_cast
+ __prepare_class__ = classmethod(__prepare_class__)
+
+ def __promise__(cls, klass, funcname, func):
+ # Builds a wrapper around some magic method and registers that magic
+ # method for the given type and method name.
+ def __wrapper__(self, *args, **kw):
+ # Automatically triggers the evaluation of a lazy value and
+ # applies the given magic method of the result type.
+ res = self.__func(*self.__args, **self.__kw)
+ for t in type(res).mro():
+ if t in self.__dispatch:
+ return self.__dispatch[t][funcname](res, *args, **kw)
+ raise TypeError("Lazy object returned unexpected type.")
+
+ if klass not in cls.__dispatch:
+ cls.__dispatch[klass] = {}
+ cls.__dispatch[klass][funcname] = func
+ return __wrapper__
+ __promise__ = classmethod(__promise__)
+
+ def __unicode_cast(self):
+ return self.__func(*self.__args, **self.__kw)
+
+ def __str_cast(self):
+ return str(self.__func(*self.__args, **self.__kw))
+
+ def __cmp__(self, rhs):
+ if self._delegate_str:
+ s = str(self.__func(*self.__args, **self.__kw))
+ elif self._delegate_unicode:
+ s = unicode(self.__func(*self.__args, **self.__kw))
+ else:
+ s = self.__func(*self.__args, **self.__kw)
+ if isinstance(rhs, Promise):
+ return -cmp(rhs, s)
+ else:
+ return cmp(s, rhs)
+
+ def __mod__(self, rhs):
+ if self._delegate_str:
+ return str(self) % rhs
+ elif self._delegate_unicode:
+ return unicode(self) % rhs
+ else:
+ raise AssertionError('__mod__ not supported for non-string types')
+
+ def __deepcopy__(self, memo):
+ # Instances of this class are effectively immutable. It's just a
+ # collection of functions. So we don't need to do anything
+ # complicated for copying.
+ memo[id(self)] = self
+ return self
+
+ def __wrapper__(*args, **kw):
+ # Creates the proxy object, instead of the actual value.
+ return __proxy__(args, kw)
+
+ return wraps(func)(__wrapper__)
+
+def _lazy_proxy_unpickle(func, args, kwargs, *resultclasses):
+ return lazy(func, *resultclasses)(*args, **kwargs)
+
+def allow_lazy(func, *resultclasses):
+ """
+ A decorator that allows a function to be called with one or more lazy
+ arguments. If none of the args are lazy, the function is evaluated
+ immediately, otherwise a __proxy__ is returned that will evaluate the
+ function when needed.
+ """
+ def wrapper(*args, **kwargs):
+ for arg in list(args) + kwargs.values():
+ if isinstance(arg, Promise):
+ break
+ else:
+ return func(*args, **kwargs)
+ return lazy(func, *resultclasses)(*args, **kwargs)
+ return wraps(func)(wrapper)
+
+class LazyObject(object):
+ """
+ A wrapper for another class that can be used to delay instantiation of the
+ wrapped class.
+
+ By subclassing, you have the opportunity to intercept and alter the
+ instantiation. If you don't need to do that, use SimpleLazyObject.
+ """
+ def __init__(self):
+ self._wrapped = None
+
+ def __getattr__(self, name):
+ if self._wrapped is None:
+ self._setup()
+ return getattr(self._wrapped, name)
+
+ def __setattr__(self, name, value):
+ if name == "_wrapped":
+ # Assign to __dict__ to avoid infinite __setattr__ loops.
+ self.__dict__["_wrapped"] = value
+ else:
+ if self._wrapped is None:
+ self._setup()
+ setattr(self._wrapped, name, value)
+
+ def __delattr__(self, name):
+ if name == "_wrapped":
+ raise TypeError("can't delete _wrapped.")
+ if self._wrapped is None:
+ self._setup()
+ delattr(self._wrapped, name)
+
+ def _setup(self):
+ """
+ Must be implemented by subclasses to initialise the wrapped object.
+ """
+ raise NotImplementedError
+
+ # introspection support:
+ __members__ = property(lambda self: self.__dir__())
+
+ def __dir__(self):
+ if self._wrapped is None:
+ self._setup()
+ return dir(self._wrapped)
+
+class SimpleLazyObject(LazyObject):
+ """
+ A lazy object initialised from any function.
+
+ Designed for compound objects of unknown type. For builtins or objects of
+ known type, use django.utils.functional.lazy.
+ """
+ def __init__(self, func):
+ """
+ Pass in a callable that returns the object to be wrapped.
+
+ If copies are made of the resulting SimpleLazyObject, which can happen
+ in various circumstances within Django, then you must ensure that the
+ callable can be safely run more than once and will return the same
+ value.
+ """
+ self.__dict__['_setupfunc'] = func
+ # For some reason, we have to inline LazyObject.__init__ here to avoid
+ # recursion
+ self._wrapped = None
+
+ def __str__(self):
+ if self._wrapped is None: self._setup()
+ return str(self._wrapped)
+
+ def __unicode__(self):
+ if self._wrapped is None: self._setup()
+ return unicode(self._wrapped)
+
+ def __deepcopy__(self, memo):
+ if self._wrapped is None:
+ # We have to use SimpleLazyObject, not self.__class__, because the
+ # latter is proxied.
+ result = SimpleLazyObject(self._setupfunc)
+ memo[id(self)] = result
+ return result
+ else:
+ # Changed to use deepcopy from copycompat, instead of copy
+ # For Python 2.4.
+ from google.appengine._internal.django.utils.copycompat import deepcopy
+ return deepcopy(self._wrapped, memo)
+
+ # Need to pretend to be the wrapped class, for the sake of objects that care
+ # about this (especially in equality tests)
+ def __get_class(self):
+ if self._wrapped is None: self._setup()
+ return self._wrapped.__class__
+ __class__ = property(__get_class)
+
+ def __eq__(self, other):
+ if self._wrapped is None: self._setup()
+ return self._wrapped == other
+
+ def __hash__(self):
+ if self._wrapped is None: self._setup()
+ return hash(self._wrapped)
+
+ def _setup(self):
+ self._wrapped = self._setupfunc()
diff --git a/google/appengine/_internal/django/utils/hashcompat.py b/google/appengine/_internal/django/utils/hashcompat.py
new file mode 100644
index 0000000..4d9b76f
--- /dev/null
+++ b/google/appengine/_internal/django/utils/hashcompat.py
@@ -0,0 +1,20 @@
+"""
+The md5 and sha modules are deprecated since Python 2.5, replaced by the
+hashlib module containing both hash algorithms. Here, we provide a common
+interface to the md5 and sha constructors, depending on system version.
+"""
+
+import sys
+if sys.version_info >= (2, 5):
+ import hashlib
+ md5_constructor = hashlib.md5
+ md5_hmac = md5_constructor
+ sha_constructor = hashlib.sha1
+ sha_hmac = sha_constructor
+else:
+ import md5
+ md5_constructor = md5.new
+ md5_hmac = md5
+ import sha
+ sha_constructor = sha.new
+ sha_hmac = sha
diff --git a/google/appengine/_internal/django/utils/html.py b/google/appengine/_internal/django/utils/html.py
new file mode 100644
index 0000000..ad444d1
--- /dev/null
+++ b/google/appengine/_internal/django/utils/html.py
@@ -0,0 +1,189 @@
+"""HTML utilities suitable for global use."""
+
+import re
+import string
+
+from google.appengine._internal.django.utils.safestring import SafeData, mark_safe
+from google.appengine._internal.django.utils.encoding import force_unicode
+from google.appengine._internal.django.utils.functional import allow_lazy
+from google.appengine._internal.django.utils.http import urlquote
+
+# Configuration for urlize() function.
+LEADING_PUNCTUATION = ['(', '<', '&lt;']
+TRAILING_PUNCTUATION = ['.', ',', ')', '>', '\n', '&gt;']
+
+# List of possible strings used for bullets in bulleted lists.
+DOTS = ['&middot;', '*', '\xe2\x80\xa2', '&#149;', '&bull;', '&#8226;']
+
+unencoded_ampersands_re = re.compile(r'&(?!(\w+|#\d+);)')
+word_split_re = re.compile(r'(\s+)')
+punctuation_re = re.compile('^(?P<lead>(?:%s)*)(?P<middle>.*?)(?P<trail>(?:%s)*)$' % ('|'.join([re.escape(x) for x in LEADING_PUNCTUATION]),
+ '|'.join([re.escape(x) for x in TRAILING_PUNCTUATION])))
+simple_email_re = re.compile(r'^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$')
+link_target_attribute_re = re.compile(r'(<a [^>]*?)target=[^\s>]+')
+html_gunk_re = re.compile(r'(?:<br clear="all">|<i><\/i>|<b><\/b>|<em><\/em>|<strong><\/strong>|<\/?smallcaps>|<\/?uppercase>)', re.IGNORECASE)
+hard_coded_bullets_re = re.compile(r'((?:<p>(?:%s).*?[a-zA-Z].*?</p>\s*)+)' % '|'.join([re.escape(x) for x in DOTS]), re.DOTALL)
+trailing_empty_content_re = re.compile(r'(?:<p>(?:&nbsp;|\s|<br \/>)*?</p>\s*)+\Z')
+del x # Temporary variable
+
+def escape(html):
+ """
+ Returns the given HTML with ampersands, quotes and angle brackets encoded.
+ """
+ return mark_safe(force_unicode(html).replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;').replace("'", '&#39;'))
+escape = allow_lazy(escape, unicode)
+
+_base_js_escapes = (
+ ('\\', r'\u005C'),
+ ('\'', r'\u0027'),
+ ('"', r'\u0022'),
+ ('>', r'\u003E'),
+ ('<', r'\u003C'),
+ ('&', r'\u0026'),
+ ('=', r'\u003D'),
+ ('-', r'\u002D'),
+ (';', r'\u003B'),
+ (u'\u2028', r'\u2028'),
+ (u'\u2029', r'\u2029')
+)
+
+# Escape every ASCII character with a value less than 32.
+_js_escapes = (_base_js_escapes +
+ tuple([('%c' % z, '\\u%04X' % z) for z in range(32)]))
+
+def escapejs(value):
+ """Hex encodes characters for use in JavaScript strings."""
+ for bad, good in _js_escapes:
+ value = mark_safe(force_unicode(value).replace(bad, good))
+ return value
+escapejs = allow_lazy(escapejs, unicode)
+
+def conditional_escape(html):
+ """
+ Similar to escape(), except that it doesn't operate on pre-escaped strings.
+ """
+ if isinstance(html, SafeData):
+ return html
+ else:
+ return escape(html)
+
+def linebreaks(value, autoescape=False):
+ """Converts newlines into <p> and <br />s."""
+ value = re.sub(r'\r\n|\r|\n', '\n', force_unicode(value)) # normalize newlines
+ paras = re.split('\n{2,}', value)
+ if autoescape:
+ paras = [u'<p>%s</p>' % escape(p).replace('\n', '<br />') for p in paras]
+ else:
+ paras = [u'<p>%s</p>' % p.replace('\n', '<br />') for p in paras]
+ return u'\n\n'.join(paras)
+linebreaks = allow_lazy(linebreaks, unicode)
+
+def strip_tags(value):
+ """Returns the given HTML with all tags stripped."""
+ return re.sub(r'<[^>]*?>', '', force_unicode(value))
+strip_tags = allow_lazy(strip_tags)
+
+def strip_spaces_between_tags(value):
+ """Returns the given HTML with spaces between tags removed."""
+ return re.sub(r'>\s+<', '><', force_unicode(value))
+strip_spaces_between_tags = allow_lazy(strip_spaces_between_tags, unicode)
+
+def strip_entities(value):
+ """Returns the given HTML with all entities (&something;) stripped."""
+ return re.sub(r'&(?:\w+|#\d+);', '', force_unicode(value))
+strip_entities = allow_lazy(strip_entities, unicode)
+
+def fix_ampersands(value):
+ """Returns the given HTML with all unencoded ampersands encoded correctly."""
+ return unencoded_ampersands_re.sub('&amp;', force_unicode(value))
+fix_ampersands = allow_lazy(fix_ampersands, unicode)
+
+def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
+ """
+ Converts any URLs in text into clickable links.
+
+ Works on http://, https://, www. links and links ending in .org, .net or
+ .com. Links can have trailing punctuation (periods, commas, close-parens)
+ and leading punctuation (opening parens) and it'll still do the right
+ thing.
+
+ If trim_url_limit is not None, the URLs in link text longer than this limit
+ will truncated to trim_url_limit-3 characters and appended with an elipsis.
+
+ If nofollow is True, the URLs in link text will get a rel="nofollow"
+ attribute.
+
+ If autoescape is True, the link text and URLs will get autoescaped.
+ """
+ trim_url = lambda x, limit=trim_url_limit: limit is not None and (len(x) > limit and ('%s...' % x[:max(0, limit - 3)])) or x
+ safe_input = isinstance(text, SafeData)
+ words = word_split_re.split(force_unicode(text))
+ nofollow_attr = nofollow and ' rel="nofollow"' or ''
+ for i, word in enumerate(words):
+ match = None
+ if '.' in word or '@' in word or ':' in word:
+ match = punctuation_re.match(word)
+ if match:
+ lead, middle, trail = match.groups()
+ # Make URL we want to point to.
+ url = None
+ if middle.startswith('http://') or middle.startswith('https://'):
+ url = urlquote(middle, safe='/&=:;#?+*')
+ elif middle.startswith('www.') or ('@' not in middle and middle and middle[0] in string.ascii_letters + string.digits and (middle.endswith('.org') or middle.endswith('.net') or middle.endswith('.com'))):
+ url = urlquote('http://%s' % middle, safe='/&=:;#?+*')
+ elif '@' in middle and not ':' in middle and simple_email_re.match(middle):
+ url = 'mailto:%s' % middle
+ nofollow_attr = ''
+ # Make link.
+ if url:
+ trimmed = trim_url(middle)
+ if autoescape and not safe_input:
+ lead, trail = escape(lead), escape(trail)
+ url, trimmed = escape(url), escape(trimmed)
+ middle = '<a href="%s"%s>%s</a>' % (url, nofollow_attr, trimmed)
+ words[i] = mark_safe('%s%s%s' % (lead, middle, trail))
+ else:
+ if safe_input:
+ words[i] = mark_safe(word)
+ elif autoescape:
+ words[i] = escape(word)
+ elif safe_input:
+ words[i] = mark_safe(word)
+ elif autoescape:
+ words[i] = escape(word)
+ return u''.join(words)
+urlize = allow_lazy(urlize, unicode)
+
+def clean_html(text):
+ """
+ Clean the given HTML. Specifically, do the following:
+ * Convert <b> and <i> to <strong> and <em>.
+ * Encode all ampersands correctly.
+ * Remove all "target" attributes from <a> tags.
+ * Remove extraneous HTML, such as presentational tags that open and
+ immediately close and <br clear="all">.
+ * Convert hard-coded bullets into HTML unordered lists.
+ * Remove stuff like "<p>&nbsp;</p>", but only if it's at the
+ bottom of the text.
+ """
+ from google.appengine._internal.django.utils.text import normalize_newlines
+ text = normalize_newlines(force_unicode(text))
+ text = re.sub(r'<(/?)\s*b\s*>', '<\\1strong>', text)
+ text = re.sub(r'<(/?)\s*i\s*>', '<\\1em>', text)
+ text = fix_ampersands(text)
+ # Remove all target="" attributes from <a> tags.
+ text = link_target_attribute_re.sub('\\1', text)
+ # Trim stupid HTML such as <br clear="all">.
+ text = html_gunk_re.sub('', text)
+ # Convert hard-coded bullets into HTML unordered lists.
+ def replace_p_tags(match):
+ s = match.group().replace('</p>', '</li>')
+ for d in DOTS:
+ s = s.replace('<p>%s' % d, '<li>')
+ return u'<ul>\n%s\n</ul>' % s
+ text = hard_coded_bullets_re.sub(replace_p_tags, text)
+ # Remove stuff like "<p>&nbsp;</p>", but only if it's at the bottom
+ # of the text.
+ text = trailing_empty_content_re.sub('', text)
+ return text
+clean_html = allow_lazy(clean_html, unicode)
diff --git a/google/appengine/_internal/django/utils/http.py b/google/appengine/_internal/django/utils/http.py
new file mode 100644
index 0000000..2ee5a8e
--- /dev/null
+++ b/google/appengine/_internal/django/utils/http.py
@@ -0,0 +1,130 @@
+import re
+import sys
+import urllib
+from email.Utils import formatdate
+
+from google.appengine._internal.django.utils.encoding import smart_str, force_unicode
+from google.appengine._internal.django.utils.functional import allow_lazy
+
+ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
+
+def urlquote(url, safe='/'):
+ """
+ A version of Python's urllib.quote() function that can operate on unicode
+ strings. The url is first UTF-8 encoded before quoting. The returned string
+ can safely be used as part of an argument to a subsequent iri_to_uri() call
+ without double-quoting occurring.
+ """
+ return force_unicode(urllib.quote(smart_str(url), safe))
+
+urlquote = allow_lazy(urlquote, unicode)
+
+def urlquote_plus(url, safe=''):
+ """
+ A version of Python's urllib.quote_plus() function that can operate on
+ unicode strings. The url is first UTF-8 encoded before quoting. The
+ returned string can safely be used as part of an argument to a subsequent
+ iri_to_uri() call without double-quoting occurring.
+ """
+ return force_unicode(urllib.quote_plus(smart_str(url), safe))
+urlquote_plus = allow_lazy(urlquote_plus, unicode)
+
+def urlencode(query, doseq=0):
+ """
+ A version of Python's urllib.urlencode() function that can operate on
+ unicode strings. The parameters are first case to UTF-8 encoded strings and
+ then encoded as per normal.
+ """
+ if hasattr(query, 'items'):
+ query = query.items()
+ return urllib.urlencode(
+ [(smart_str(k),
+ isinstance(v, (list,tuple)) and [smart_str(i) for i in v] or smart_str(v))
+ for k, v in query],
+ doseq)
+
+def cookie_date(epoch_seconds=None):
+ """
+ Formats the time to ensure compatibility with Netscape's cookie standard.
+
+ Accepts a floating point number expressed in seconds since the epoch, in
+ UTC - such as that outputted by time.time(). If set to None, defaults to
+ the current time.
+
+ Outputs a string in the format 'Wdy, DD-Mon-YYYY HH:MM:SS GMT'.
+ """
+ rfcdate = formatdate(epoch_seconds)
+ return '%s-%s-%s GMT' % (rfcdate[:7], rfcdate[8:11], rfcdate[12:25])
+
+def http_date(epoch_seconds=None):
+ """
+ Formats the time to match the RFC1123 date format as specified by HTTP
+ RFC2616 section 3.3.1.
+
+ Accepts a floating point number expressed in seconds since the epoch, in
+ UTC - such as that outputted by time.time(). If set to None, defaults to
+ the current time.
+
+ Outputs a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
+ """
+ rfcdate = formatdate(epoch_seconds)
+ return '%s GMT' % rfcdate[:25]
+
+# Base 36 functions: useful for generating compact URLs
+
+def base36_to_int(s):
+ """
+ Converts a base 36 string to an ``int``. Raises ``ValueError` if the
+ input won't fit into an int.
+ """
+ # To prevent overconsumption of server resources, reject any
+ # base36 string that is long than 13 base36 digits (13 digits
+ # is sufficient to base36-encode any 64-bit integer)
+ if len(s) > 13:
+ raise ValueError("Base36 input too large")
+ value = int(s, 36)
+ # ... then do a final check that the value will fit into an int.
+ if value > sys.maxint:
+ raise ValueError("Base36 input too large")
+ return value
+
+def int_to_base36(i):
+ """
+ Converts an integer to a base36 string
+ """
+ digits = "0123456789abcdefghijklmnopqrstuvwxyz"
+ factor = 0
+ # Find starting factor
+ while True:
+ factor += 1
+ if i < 36 ** factor:
+ factor -= 1
+ break
+ base36 = []
+ # Construct base36 representation
+ while factor >= 0:
+ j = 36 ** factor
+ base36.append(digits[i / j])
+ i = i % j
+ factor -= 1
+ return ''.join(base36)
+
+def parse_etags(etag_str):
+ """
+ Parses a string with one or several etags passed in If-None-Match and
+ If-Match headers by the rules in RFC 2616. Returns a list of etags
+ without surrounding double quotes (") and unescaped from \<CHAR>.
+ """
+ etags = ETAG_MATCH.findall(etag_str)
+ if not etags:
+ # etag_str has wrong format, treat it as an opaque string then
+ return [etag_str]
+ etags = [e.decode('string_escape') for e in etags]
+ return etags
+
+def quote_etag(etag):
+ """
+ Wraps a string in double quotes escaping contents as necesary.
+ """
+ return '"%s"' % etag.replace('\\', '\\\\').replace('"', '\\"')
+
diff --git a/google/appengine/_internal/django/utils/importlib.py b/google/appengine/_internal/django/utils/importlib.py
new file mode 100644
index 0000000..ef4d0e4
--- /dev/null
+++ b/google/appengine/_internal/django/utils/importlib.py
@@ -0,0 +1,36 @@
+# Taken from Python 2.7 with permission from/by the original author.
+import sys
+
+def _resolve_name(name, package, level):
+ """Return the absolute name of the module to be imported."""
+ if not hasattr(package, 'rindex'):
+ raise ValueError("'package' not set to a string")
+ dot = len(package)
+ for x in xrange(level, 1, -1):
+ try:
+ dot = package.rindex('.', 0, dot)
+ except ValueError:
+ raise ValueError("attempted relative import beyond top-level "
+ "package")
+ return "%s.%s" % (package[:dot], name)
+
+
+def import_module(name, package=None):
+ """Import a module.
+
+ The 'package' argument is required when performing a relative import. It
+ specifies the package to use as the anchor point from which to resolve the
+ relative import to an absolute import.
+
+ """
+ if name.startswith('.'):
+ if not package:
+ raise TypeError("relative imports require the 'package' argument")
+ level = 0
+ for character in name:
+ if character != '.':
+ break
+ level += 1
+ name = _resolve_name(name[level:], package, level)
+ __import__(name)
+ return sys.modules[name]
diff --git a/google/appengine/_internal/django/utils/itercompat.py b/google/appengine/_internal/django/utils/itercompat.py
new file mode 100644
index 0000000..d4ff250
--- /dev/null
+++ b/google/appengine/_internal/django/utils/itercompat.py
@@ -0,0 +1,45 @@
+"""
+Providing iterator functions that are not in all version of Python we support.
+Where possible, we try to use the system-native version and only fall back to
+these implementations if necessary.
+"""
+
+import itertools
+
+# Fallback for Python 2.4, Python 2.5
+def product(*args, **kwds):
+ """
+ Taken from http://docs.python.org/library/itertools.html#itertools.product
+ """
+ # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
+ # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
+ pools = map(tuple, args) * kwds.get('repeat', 1)
+ result = [[]]
+ for pool in pools:
+ result = [x+[y] for x in result for y in pool]
+ for prod in result:
+ yield tuple(prod)
+
+if hasattr(itertools, 'product'):
+ product = itertools.product
+
+def is_iterable(x):
+ "A implementation independent way of checking for iterables"
+ try:
+ iter(x)
+ except TypeError:
+ return False
+ else:
+ return True
+
+def all(iterable):
+ for item in iterable:
+ if not item:
+ return False
+ return True
+
+def any(iterable):
+ for item in iterable:
+ if item:
+ return True
+ return False
diff --git a/google/appengine/_internal/django/utils/module_loading.py b/google/appengine/_internal/django/utils/module_loading.py
new file mode 100644
index 0000000..32ca69a
--- /dev/null
+++ b/google/appengine/_internal/django/utils/module_loading.py
@@ -0,0 +1,63 @@
+import imp
+import os
+import sys
+
+
+def module_has_submodule(package, module_name):
+ """See if 'module' is in 'package'."""
+ name = ".".join([package.__name__, module_name])
+ try:
+ # None indicates a cached miss; see mark_miss() in Python/import.c.
+ return sys.modules[name] is not None
+ except KeyError:
+ pass
+ for finder in sys.meta_path:
+ if finder.find_module(name):
+ return True
+ for entry in package.__path__: # No __path__, then not a package.
+ try:
+ # Try the cached finder.
+ finder = sys.path_importer_cache[entry]
+ if finder is None:
+ # Implicit import machinery should be used.
+ try:
+ file_, _, _ = imp.find_module(module_name, [entry])
+ if file_:
+ file_.close()
+ return True
+ except ImportError:
+ continue
+ # Else see if the finder knows of a loader.
+ elif finder.find_module(name):
+ return True
+ else:
+ continue
+ except KeyError:
+ # No cached finder, so try and make one.
+ for hook in sys.path_hooks:
+ try:
+ finder = hook(entry)
+ # XXX Could cache in sys.path_importer_cache
+ if finder.find_module(name):
+ return True
+ else:
+ # Once a finder is found, stop the search.
+ break
+ except ImportError:
+ # Continue the search for a finder.
+ continue
+ else:
+ # No finder found.
+ # Try the implicit import machinery if searching a directory.
+ if os.path.isdir(entry):
+ try:
+ file_, _, _ = imp.find_module(module_name, [entry])
+ if file_:
+ file_.close()
+ return True
+ except ImportError:
+ pass
+ # XXX Could insert None or NullImporter
+ else:
+ # Exhausted the search, so the module cannot be found.
+ return False
diff --git a/google/appengine/_internal/django/utils/numberformat.py b/google/appengine/_internal/django/utils/numberformat.py
new file mode 100644
index 0000000..f69703b
--- /dev/null
+++ b/google/appengine/_internal/django/utils/numberformat.py
@@ -0,0 +1,47 @@
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.utils.safestring import mark_safe
+
+
+def format(number, decimal_sep, decimal_pos, grouping=0, thousand_sep=''):
+ """
+ Gets a number (as a number or string), and returns it as a string,
+ using formats definied as arguments:
+
+ * decimal_sep: Decimal separator symbol (for example ".")
+ * decimal_pos: Number of decimal positions
+ * grouping: Number of digits in every group limited by thousand separator
+ * thousand_sep: Thousand separator symbol (for example ",")
+
+ """
+ use_grouping = settings.USE_L10N and settings.USE_THOUSAND_SEPARATOR and grouping
+ # Make the common case fast:
+ if isinstance(number, int) and not use_grouping and not decimal_pos:
+ return mark_safe(unicode(number))
+ # sign
+ if float(number) < 0:
+ sign = '-'
+ else:
+ sign = ''
+ str_number = unicode(number)
+ if str_number[0] == '-':
+ str_number = str_number[1:]
+ # decimal part
+ if '.' in str_number:
+ int_part, dec_part = str_number.split('.')
+ if decimal_pos:
+ dec_part = dec_part[:decimal_pos]
+ else:
+ int_part, dec_part = str_number, ''
+ if decimal_pos:
+ dec_part = dec_part + ('0' * (decimal_pos - len(dec_part)))
+ if dec_part: dec_part = decimal_sep + dec_part
+ # grouping
+ if use_grouping:
+ int_part_gd = ''
+ for cnt, digit in enumerate(int_part[::-1]):
+ if cnt and not cnt % grouping:
+ int_part_gd += thousand_sep
+ int_part_gd += digit
+ int_part = int_part_gd[::-1]
+ return sign + int_part + dec_part
+
diff --git a/google/appengine/_internal/django/utils/regex_helper.py b/google/appengine/_internal/django/utils/regex_helper.py
new file mode 100644
index 0000000..b11fe96
--- /dev/null
+++ b/google/appengine/_internal/django/utils/regex_helper.py
@@ -0,0 +1,328 @@
+"""
+Functions for reversing a regular expression (used in reverse URL resolving).
+Used internally by Django and not intended for external use.
+
+This is not, and is not intended to be, a complete reg-exp decompiler. It
+should be good enough for a large class of URLs, however.
+"""
+
+# Mapping of an escape character to a representative of that class. So, e.g.,
+# "\w" is replaced by "x" in a reverse URL. A value of None means to ignore
+# this sequence. Any missing key is mapped to itself.
+ESCAPE_MAPPINGS = {
+ "A": None,
+ "b": None,
+ "B": None,
+ "d": u"0",
+ "D": u"x",
+ "s": u" ",
+ "S": u"x",
+ "w": u"x",
+ "W": u"!",
+ "Z": None,
+}
+
+class Choice(list):
+ """
+ Used to represent multiple possibilities at this point in a pattern string.
+ We use a distinguished type, rather than a list, so that the usage in the
+ code is clear.
+ """
+
+class Group(list):
+ """
+ Used to represent a capturing group in the pattern string.
+ """
+
+class NonCapture(list):
+ """
+ Used to represent a non-capturing group in the pattern string.
+ """
+
+def normalize(pattern):
+ """
+ Given a reg-exp pattern, normalizes it to a list of forms that suffice for
+ reverse matching. This does the following:
+
+ (1) For any repeating sections, keeps the minimum number of occurrences
+ permitted (this means zero for optional groups).
+ (2) If an optional group includes parameters, include one occurrence of
+ that group (along with the zero occurrence case from step (1)).
+ (3) Select the first (essentially an arbitrary) element from any character
+ class. Select an arbitrary character for any unordered class (e.g. '.'
+ or '\w') in the pattern.
+ (4) Ignore comments and any of the reg-exp flags that won't change
+ what we construct ("iLmsu"). "(?x)" is an error, however.
+ (5) Raise an error on all other non-capturing (?...) forms (e.g.
+ look-ahead and look-behind matches) and any disjunctive ('|')
+ constructs.
+
+ Django's URLs for forward resolving are either all positional arguments or
+ all keyword arguments. That is assumed here, as well. Although reverse
+ resolving can be done using positional args when keyword args are
+ specified, the two cannot be mixed in the same reverse() call.
+ """
+ # Do a linear scan to work out the special features of this pattern. The
+ # idea is that we scan once here and collect all the information we need to
+ # make future decisions.
+ result = []
+ non_capturing_groups = []
+ consume_next = True
+ pattern_iter = next_char(iter(pattern))
+ num_args = 0
+
+ # A "while" loop is used here because later on we need to be able to peek
+ # at the next character and possibly go around without consuming another
+ # one at the top of the loop.
+ try:
+ ch, escaped = pattern_iter.next()
+ except StopIteration:
+ return zip([u''], [[]])
+
+ try:
+ while True:
+ if escaped:
+ result.append(ch)
+ elif ch == '.':
+ # Replace "any character" with an arbitrary representative.
+ result.append(u".")
+ elif ch == '|':
+ # FIXME: One day we should do this, but not in 1.0.
+ raise NotImplementedError
+ elif ch == "^":
+ pass
+ elif ch == '$':
+ break
+ elif ch == ')':
+ # This can only be the end of a non-capturing group, since all
+ # other unescaped parentheses are handled by the grouping
+ # section later (and the full group is handled there).
+ #
+ # We regroup everything inside the capturing group so that it
+ # can be quantified, if necessary.
+ start = non_capturing_groups.pop()
+ inner = NonCapture(result[start:])
+ result = result[:start] + [inner]
+ elif ch == '[':
+ # Replace ranges with the first character in the range.
+ ch, escaped = pattern_iter.next()
+ result.append(ch)
+ ch, escaped = pattern_iter.next()
+ while escaped or ch != ']':
+ ch, escaped = pattern_iter.next()
+ elif ch == '(':
+ # Some kind of group.
+ ch, escaped = pattern_iter.next()
+ if ch != '?' or escaped:
+ # A positional group
+ name = "_%d" % num_args
+ num_args += 1
+ result.append(Group(((u"%%(%s)s" % name), name)))
+ walk_to_end(ch, pattern_iter)
+ else:
+ ch, escaped = pattern_iter.next()
+ if ch in "iLmsu#":
+ # All of these are ignorable. Walk to the end of the
+ # group.
+ walk_to_end(ch, pattern_iter)
+ elif ch == ':':
+ # Non-capturing group
+ non_capturing_groups.append(len(result))
+ elif ch != 'P':
+ # Anything else, other than a named group, is something
+ # we cannot reverse.
+ raise ValueError("Non-reversible reg-exp portion: '(?%s'" % ch)
+ else:
+ ch, escaped = pattern_iter.next()
+ if ch != '<':
+ raise ValueError("Non-reversible reg-exp portion: '(?P%s'" % ch)
+ # We are in a named capturing group. Extract the name and
+ # then skip to the end.
+ name = []
+ ch, escaped = pattern_iter.next()
+ while ch != '>':
+ name.append(ch)
+ ch, escaped = pattern_iter.next()
+ param = ''.join(name)
+ result.append(Group(((u"%%(%s)s" % param), param)))
+ walk_to_end(ch, pattern_iter)
+ elif ch in "*?+{":
+ # Quantifiers affect the previous item in the result list.
+ count, ch = get_quantifier(ch, pattern_iter)
+ if ch:
+ # We had to look ahead, but it wasn't needed to compute the
+ # quantifier, so use this character next time around the
+ # main loop.
+ consume_next = False
+
+ if count == 0:
+ if contains(result[-1], Group):
+ # If we are quantifying a capturing group (or
+ # something containing such a group) and the minimum is
+ # zero, we must also handle the case of one occurrence
+ # being present. All the quantifiers (except {0,0},
+ # which we conveniently ignore) that have a 0 minimum
+ # also allow a single occurrence.
+ result[-1] = Choice([None, result[-1]])
+ else:
+ result.pop()
+ elif count > 1:
+ result.extend([result[-1]] * (count - 1))
+ else:
+ # Anything else is a literal.
+ result.append(ch)
+
+ if consume_next:
+ ch, escaped = pattern_iter.next()
+ else:
+ consume_next = True
+ except StopIteration:
+ pass
+ except NotImplementedError:
+ # A case of using the disjunctive form. No results for you!
+ return zip([u''], [[]])
+
+ return zip(*flatten_result(result))
+
+def next_char(input_iter):
+ """
+ An iterator that yields the next character from "pattern_iter", respecting
+ escape sequences. An escaped character is replaced by a representative of
+ its class (e.g. \w -> "x"). If the escaped character is one that is
+ skipped, it is not returned (the next character is returned instead).
+
+ Yields the next character, along with a boolean indicating whether it is a
+ raw (unescaped) character or not.
+ """
+ for ch in input_iter:
+ if ch != '\\':
+ yield ch, False
+ continue
+ ch = input_iter.next()
+ representative = ESCAPE_MAPPINGS.get(ch, ch)
+ if representative is None:
+ continue
+ yield representative, True
+
+def walk_to_end(ch, input_iter):
+ """
+ The iterator is currently inside a capturing group. We want to walk to the
+ close of this group, skipping over any nested groups and handling escaped
+ parentheses correctly.
+ """
+ if ch == '(':
+ nesting = 1
+ else:
+ nesting = 0
+ for ch, escaped in input_iter:
+ if escaped:
+ continue
+ elif ch == '(':
+ nesting += 1
+ elif ch == ')':
+ if not nesting:
+ return
+ nesting -= 1
+
+def get_quantifier(ch, input_iter):
+ """
+ Parse a quantifier from the input, where "ch" is the first character in the
+ quantifier.
+
+ Returns the minimum number of occurrences permitted by the quantifier and
+ either None or the next character from the input_iter if the next character
+ is not part of the quantifier.
+ """
+ if ch in '*?+':
+ try:
+ ch2, escaped = input_iter.next()
+ except StopIteration:
+ ch2 = None
+ if ch2 == '?':
+ ch2 = None
+ if ch == '+':
+ return 1, ch2
+ return 0, ch2
+
+ quant = []
+ while ch != '}':
+ ch, escaped = input_iter.next()
+ quant.append(ch)
+ quant = quant[:-1]
+ values = ''.join(quant).split(',')
+
+ # Consume the trailing '?', if necessary.
+ try:
+ ch, escaped = input_iter.next()
+ except StopIteration:
+ ch = None
+ if ch == '?':
+ ch = None
+ return int(values[0]), ch
+
+def contains(source, inst):
+ """
+ Returns True if the "source" contains an instance of "inst". False,
+ otherwise.
+ """
+ if isinstance(source, inst):
+ return True
+ if isinstance(source, NonCapture):
+ for elt in source:
+ if contains(elt, inst):
+ return True
+ return False
+
+def flatten_result(source):
+ """
+ Turns the given source sequence into a list of reg-exp possibilities and
+ their arguments. Returns a list of strings and a list of argument lists.
+ Each of the two lists will be of the same length.
+ """
+ if source is None:
+ return [u''], [[]]
+ if isinstance(source, Group):
+ if source[1] is None:
+ params = []
+ else:
+ params = [source[1]]
+ return [source[0]], [params]
+ result = [u'']
+ result_args = [[]]
+ pos = last = 0
+ for pos, elt in enumerate(source):
+ if isinstance(elt, basestring):
+ continue
+ piece = u''.join(source[last:pos])
+ if isinstance(elt, Group):
+ piece += elt[0]
+ param = elt[1]
+ else:
+ param = None
+ last = pos + 1
+ for i in range(len(result)):
+ result[i] += piece
+ if param:
+ result_args[i].append(param)
+ if isinstance(elt, (Choice, NonCapture)):
+ if isinstance(elt, NonCapture):
+ elt = [elt]
+ inner_result, inner_args = [], []
+ for item in elt:
+ res, args = flatten_result(item)
+ inner_result.extend(res)
+ inner_args.extend(args)
+ new_result = []
+ new_args = []
+ for item, args in zip(result, result_args):
+ for i_item, i_args in zip(inner_result, inner_args):
+ new_result.append(item + i_item)
+ new_args.append(args[:] + i_args)
+ result = new_result
+ result_args = new_args
+ if pos >= last:
+ piece = u''.join(source[last:])
+ for i in range(len(result)):
+ result[i] += piece
+ return result, result_args
+
diff --git a/google/appengine/_internal/django/utils/safestring.py b/google/appengine/_internal/django/utils/safestring.py
new file mode 100644
index 0000000..63eff2a
--- /dev/null
+++ b/google/appengine/_internal/django/utils/safestring.py
@@ -0,0 +1,119 @@
+"""
+Functions for working with "safe strings": strings that can be displayed safely
+without further escaping in HTML. Marking something as a "safe string" means
+that the producer of the string has already turned characters that should not
+be interpreted by the HTML engine (e.g. '<') into the appropriate entities.
+"""
+from google.appengine._internal.django.utils.functional import curry, Promise
+
+class EscapeData(object):
+ pass
+
+class EscapeString(str, EscapeData):
+ """
+ A string that should be HTML-escaped when output.
+ """
+ pass
+
+class EscapeUnicode(unicode, EscapeData):
+ """
+ A unicode object that should be HTML-escaped when output.
+ """
+ pass
+
+class SafeData(object):
+ pass
+
+class SafeString(str, SafeData):
+ """
+ A string subclass that has been specifically marked as "safe" (requires no
+ further escaping) for HTML output purposes.
+ """
+ def __add__(self, rhs):
+ """
+ Concatenating a safe string with another safe string or safe unicode
+ object is safe. Otherwise, the result is no longer safe.
+ """
+ t = super(SafeString, self).__add__(rhs)
+ if isinstance(rhs, SafeUnicode):
+ return SafeUnicode(t)
+ elif isinstance(rhs, SafeString):
+ return SafeString(t)
+ return t
+
+ def _proxy_method(self, *args, **kwargs):
+ """
+ Wrap a call to a normal unicode method up so that we return safe
+ results. The method that is being wrapped is passed in the 'method'
+ argument.
+ """
+ method = kwargs.pop('method')
+ data = method(self, *args, **kwargs)
+ if isinstance(data, str):
+ return SafeString(data)
+ else:
+ return SafeUnicode(data)
+
+ decode = curry(_proxy_method, method = str.decode)
+
+class SafeUnicode(unicode, SafeData):
+ """
+ A unicode subclass that has been specifically marked as "safe" for HTML
+ output purposes.
+ """
+ def __add__(self, rhs):
+ """
+ Concatenating a safe unicode object with another safe string or safe
+ unicode object is safe. Otherwise, the result is no longer safe.
+ """
+ t = super(SafeUnicode, self).__add__(rhs)
+ if isinstance(rhs, SafeData):
+ return SafeUnicode(t)
+ return t
+
+ def _proxy_method(self, *args, **kwargs):
+ """
+ Wrap a call to a normal unicode method up so that we return safe
+ results. The method that is being wrapped is passed in the 'method'
+ argument.
+ """
+ method = kwargs.pop('method')
+ data = method(self, *args, **kwargs)
+ if isinstance(data, str):
+ return SafeString(data)
+ else:
+ return SafeUnicode(data)
+
+ encode = curry(_proxy_method, method = unicode.encode)
+
+def mark_safe(s):
+ """
+ Explicitly mark a string as safe for (HTML) output purposes. The returned
+ object can be used everywhere a string or unicode object is appropriate.
+
+ Can be called multiple times on a single string.
+ """
+ if isinstance(s, SafeData):
+ return s
+ if isinstance(s, str) or (isinstance(s, Promise) and s._delegate_str):
+ return SafeString(s)
+ if isinstance(s, (unicode, Promise)):
+ return SafeUnicode(s)
+ return SafeString(str(s))
+
+def mark_for_escaping(s):
+ """
+ Explicitly mark a string as requiring HTML escaping upon output. Has no
+ effect on SafeData subclasses.
+
+ Can be called multiple times on a single string (the resulting escaping is
+ only applied once).
+ """
+ if isinstance(s, (SafeData, EscapeData)):
+ return s
+ if isinstance(s, str) or (isinstance(s, Promise) and s._delegate_str):
+ return EscapeString(s)
+ if isinstance(s, (unicode, Promise)):
+ return EscapeUnicode(s)
+ return EscapeString(str(s))
+
diff --git a/google/appengine/_internal/django/utils/simplejson/__init__.py b/google/appengine/_internal/django/utils/simplejson/__init__.py
new file mode 100644
index 0000000..c826968
--- /dev/null
+++ b/google/appengine/_internal/django/utils/simplejson/__init__.py
@@ -0,0 +1,349 @@
+r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
+JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
+interchange format.
+
+:mod:`simplejson` exposes an API familiar to users of the standard library
+:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
+version of the :mod:`json` library contained in Python 2.6, but maintains
+compatibility with Python 2.4 and Python 2.5 and (currently) has
+significant performance advantages, even without using the optional C
+extension for speedups.
+
+Encoding basic Python object hierarchies::
+
+ >>> import simplejson as json
+ >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
+ '["foo", {"bar": ["baz", null, 1.0, 2]}]'
+ >>> print json.dumps("\"foo\bar")
+ "\"foo\bar"
+ >>> print json.dumps(u'\u1234')
+ "\u1234"
+ >>> print json.dumps('\\')
+ "\\"
+ >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
+ {"a": 0, "b": 0, "c": 0}
+ >>> from StringIO import StringIO
+ >>> io = StringIO()
+ >>> json.dump(['streaming API'], io)
+ >>> io.getvalue()
+ '["streaming API"]'
+
+Compact encoding::
+
+ >>> import simplejson as json
+ >>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
+ '[1,2,3,{"4":5,"6":7}]'
+
+Pretty printing::
+
+ >>> import simplejson as json
+ >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
+ >>> print '\n'.join([l.rstrip() for l in s.splitlines()])
+ {
+ "4": 5,
+ "6": 7
+ }
+
+Decoding JSON::
+
+ >>> import simplejson as json
+ >>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
+ >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
+ True
+ >>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
+ True
+ >>> from StringIO import StringIO
+ >>> io = StringIO('["streaming API"]')
+ >>> json.load(io)[0] == 'streaming API'
+ True
+
+Specializing JSON object decoding::
+
+ >>> import simplejson as json
+ >>> def as_complex(dct):
+ ... if '__complex__' in dct:
+ ... return complex(dct['real'], dct['imag'])
+ ... return dct
+ ...
+ >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
+ ... object_hook=as_complex)
+ (1+2j)
+ >>> import decimal
+ >>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
+ True
+
+Specializing JSON object encoding::
+
+ >>> import simplejson as json
+ >>> def encode_complex(obj):
+ ... if isinstance(obj, complex):
+ ... return [obj.real, obj.imag]
+ ... raise TypeError("%r is not JSON serializable" % (o,))
+ ...
+ >>> json.dumps(2 + 1j, default=encode_complex)
+ '[2.0, 1.0]'
+ >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
+ '[2.0, 1.0]'
+ >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
+ '[2.0, 1.0]'
+
+
+Using simplejson.tool from the shell to validate and pretty-print::
+
+ $ echo '{"json":"obj"}' | python -msimplejson.tool
+ {
+ "json": "obj"
+ }
+ $ echo '{ 1.2:3.4}' | python -msimplejson.tool
+ Expecting property name: line 1 column 2 (char 2)
+"""
+
+# Django modification: try to use the system version first, providing it's
+# either of a later version of has the C speedups in place. Otherwise, fall
+# back to our local copy.
+
+__version__ = '2.0.7'
+
+use_system_version = False
+try:
+ # The system-installed version has priority providing it is either not an
+ # earlier version or it contains the C speedups.
+ import simplejson
+ if (simplejson.__version__.split('.') >= __version__.split('.') or
+ hasattr(simplejson, '_speedups')):
+ from simplejson import *
+ use_system_version = True
+except ImportError:
+ pass
+
+if not use_system_version:
+ try:
+ from json import * # Python 2.6 preferred over local copy.
+
+ # There is a "json" package around that is not Python's "json", so we
+ # check for something that is only in the namespace of the version we
+ # want.
+ JSONDecoder
+
+ use_system_version = True
+ except (ImportError, NameError):
+ pass
+
+# If all else fails, we have a bundled version that can be used.
+if not use_system_version:
+ __all__ = [
+ 'dump', 'dumps', 'load', 'loads',
+ 'JSONDecoder', 'JSONEncoder',
+ ]
+
+ from google.appengine._internal.django.utils.simplejson.decoder import JSONDecoder
+ from google.appengine._internal.django.utils.simplejson.encoder import JSONEncoder
+
+ _default_encoder = JSONEncoder(
+ skipkeys=False,
+ ensure_ascii=True,
+ check_circular=True,
+ allow_nan=True,
+ indent=None,
+ separators=None,
+ encoding='utf-8',
+ default=None,
+ )
+
+ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
+ allow_nan=True, cls=None, indent=None, separators=None,
+ encoding='utf-8', default=None, **kw):
+ """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
+ ``.write()``-supporting file-like object).
+
+ If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
+ (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
+ will be skipped instead of raising a ``TypeError``.
+
+ If ``ensure_ascii`` is ``False``, then some chunks written to ``fp``
+ may be ``unicode`` instances, subject to normal Python ``str`` to
+ ``unicode`` coercion rules. Unless ``fp.write()`` explicitly
+ understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
+ to cause an error.
+
+ If ``check_circular`` is ``False``, then the circular reference check
+ for container types will be skipped and a circular reference will
+ result in an ``OverflowError`` (or worse).
+
+ If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to
+ serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
+ in strict compliance of the JSON specification, instead of using the
+ JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
+
+ If ``indent`` is a non-negative integer, then JSON array elements and object
+ members will be pretty-printed with that indent level. An indent level
+ of 0 will only insert newlines. ``None`` is the most compact representation.
+
+ If ``separators`` is an ``(item_separator, dict_separator)`` tuple
+ then it will be used instead of the default ``(', ', ': ')`` separators.
+ ``(',', ':')`` is the most compact JSON representation.
+
+ ``encoding`` is the character encoding for str instances, default is UTF-8.
+
+ ``default(obj)`` is a function that should return a serializable version
+ of obj or raise TypeError. The default simply raises TypeError.
+
+ To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
+ ``.default()`` method to serialize additional types), specify it with
+ the ``cls`` kwarg.
+
+ """
+ # cached encoder
+ if (skipkeys is False and ensure_ascii is True and
+ check_circular is True and allow_nan is True and
+ cls is None and indent is None and separators is None and
+ encoding == 'utf-8' and default is None and not kw):
+ iterable = _default_encoder.iterencode(obj)
+ else:
+ if cls is None:
+ cls = JSONEncoder
+ iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
+ check_circular=check_circular, allow_nan=allow_nan, indent=indent,
+ separators=separators, encoding=encoding,
+ default=default, **kw).iterencode(obj)
+ # could accelerate with writelines in some versions of Python, at
+ # a debuggability cost
+ for chunk in iterable:
+ fp.write(chunk)
+
+
+ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
+ allow_nan=True, cls=None, indent=None, separators=None,
+ encoding='utf-8', default=None, **kw):
+ """Serialize ``obj`` to a JSON formatted ``str``.
+
+ If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
+ (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
+ will be skipped instead of raising a ``TypeError``.
+
+ If ``ensure_ascii`` is ``False``, then the return value will be a
+ ``unicode`` instance subject to normal Python ``str`` to ``unicode``
+ coercion rules instead of being escaped to an ASCII ``str``.
+
+ If ``check_circular`` is ``False``, then the circular reference check
+ for container types will be skipped and a circular reference will
+ result in an ``OverflowError`` (or worse).
+
+ If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to
+ serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
+ strict compliance of the JSON specification, instead of using the
+ JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
+
+ If ``indent`` is a non-negative integer, then JSON array elements and
+ object members will be pretty-printed with that indent level. An indent
+ level of 0 will only insert newlines. ``None`` is the most compact
+ representation.
+
+ If ``separators`` is an ``(item_separator, dict_separator)`` tuple
+ then it will be used instead of the default ``(', ', ': ')`` separators.
+ ``(',', ':')`` is the most compact JSON representation.
+
+ ``encoding`` is the character encoding for str instances, default is UTF-8.
+
+ ``default(obj)`` is a function that should return a serializable version
+ of obj or raise TypeError. The default simply raises TypeError.
+
+ To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
+ ``.default()`` method to serialize additional types), specify it with
+ the ``cls`` kwarg.
+
+ """
+ # cached encoder
+ if (skipkeys is False and ensure_ascii is True and
+ check_circular is True and allow_nan is True and
+ cls is None and indent is None and separators is None and
+ encoding == 'utf-8' and default is None and not kw):
+ return _default_encoder.encode(obj)
+ if cls is None:
+ cls = JSONEncoder
+ return cls(
+ skipkeys=skipkeys, ensure_ascii=ensure_ascii,
+ check_circular=check_circular, allow_nan=allow_nan, indent=indent,
+ separators=separators, encoding=encoding, default=default,
+ **kw).encode(obj)
+
+
+ _default_decoder = JSONDecoder(encoding=None, object_hook=None)
+
+
+ def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
+ parse_int=None, parse_constant=None, **kw):
+ """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
+ a JSON document) to a Python object.
+
+ If the contents of ``fp`` is encoded with an ASCII based encoding other
+ than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
+ be specified. Encodings that are not ASCII based (such as UCS-2) are
+ not allowed, and should be wrapped with
+ ``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode``
+ object and passed to ``loads()``
+
+ ``object_hook`` is an optional function that will be called with the
+ result of any object literal decode (a ``dict``). The return value of
+ ``object_hook`` will be used instead of the ``dict``. This feature
+ can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+
+ To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
+ kwarg.
+
+ """
+ return loads(fp.read(),
+ encoding=encoding, cls=cls, object_hook=object_hook,
+ parse_float=parse_float, parse_int=parse_int,
+ parse_constant=parse_constant, **kw)
+
+
+ def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
+ parse_int=None, parse_constant=None, **kw):
+ """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
+ document) to a Python object.
+
+ If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
+ other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
+ must be specified. Encodings that are not ASCII based (such as UCS-2)
+ are not allowed and should be decoded to ``unicode`` first.
+
+ ``object_hook`` is an optional function that will be called with the
+ result of any object literal decode (a ``dict``). The return value of
+ ``object_hook`` will be used instead of the ``dict``. This feature
+ can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+
+ ``parse_float``, if specified, will be called with the string
+ of every JSON float to be decoded. By default this is equivalent to
+ float(num_str). This can be used to use another datatype or parser
+ for JSON floats (e.g. decimal.Decimal).
+
+ ``parse_int``, if specified, will be called with the string
+ of every JSON int to be decoded. By default this is equivalent to
+ int(num_str). This can be used to use another datatype or parser
+ for JSON integers (e.g. float).
+
+ ``parse_constant``, if specified, will be called with one of the
+ following strings: -Infinity, Infinity, NaN, null, true, false.
+ This can be used to raise an exception if invalid JSON numbers
+ are encountered.
+
+ To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
+ kwarg.
+
+ """
+ if (cls is None and encoding is None and object_hook is None and
+ parse_int is None and parse_float is None and
+ parse_constant is None and not kw):
+ return _default_decoder.decode(s)
+ if cls is None:
+ cls = JSONDecoder
+ if object_hook is not None:
+ kw['object_hook'] = object_hook
+ if parse_float is not None:
+ kw['parse_float'] = parse_float
+ if parse_int is not None:
+ kw['parse_int'] = parse_int
+ if parse_constant is not None:
+ kw['parse_constant'] = parse_constant
+ return cls(encoding=encoding, **kw).decode(s)
diff --git a/google/appengine/_internal/django/utils/simplejson/decoder.py b/google/appengine/_internal/django/utils/simplejson/decoder.py
new file mode 100644
index 0000000..2755c5e
--- /dev/null
+++ b/google/appengine/_internal/django/utils/simplejson/decoder.py
@@ -0,0 +1,345 @@
+"""Implementation of JSONDecoder
+"""
+import re
+import sys
+import struct
+
+from google.appengine._internal.django.utils.simplejson.scanner import make_scanner
+c_scanstring = None
+
+__all__ = ['JSONDecoder']
+
+FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
+
+def _floatconstants():
+ _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
+ if sys.byteorder != 'big':
+ _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
+ nan, inf = struct.unpack('dd', _BYTES)
+ return nan, inf, -inf
+
+NaN, PosInf, NegInf = _floatconstants()
+
+
+def linecol(doc, pos):
+ lineno = doc.count('\n', 0, pos) + 1
+ if lineno == 1:
+ colno = pos
+ else:
+ colno = pos - doc.rindex('\n', 0, pos)
+ return lineno, colno
+
+
+def errmsg(msg, doc, pos, end=None):
+ # Note that this function is called from _speedups
+ lineno, colno = linecol(doc, pos)
+ if end is None:
+ return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
+ endlineno, endcolno = linecol(doc, end)
+ return '%s: line %d column %d - line %d column %d (char %d - %d)' % (
+ msg, lineno, colno, endlineno, endcolno, pos, end)
+
+
+_CONSTANTS = {
+ '-Infinity': NegInf,
+ 'Infinity': PosInf,
+ 'NaN': NaN,
+}
+
+STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
+BACKSLASH = {
+ '"': u'"', '\\': u'\\', '/': u'/',
+ 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
+}
+
+DEFAULT_ENCODING = "utf-8"
+
+def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
+ """Scan the string s for a JSON string. End is the index of the
+ character in s after the quote that started the JSON string.
+ Unescapes all valid JSON string escape sequences and raises ValueError
+ on attempt to decode an invalid string. If strict is False then literal
+ control characters are allowed in the string.
+
+ Returns a tuple of the decoded string and the index of the character in s
+ after the end quote."""
+ if encoding is None:
+ encoding = DEFAULT_ENCODING
+ chunks = []
+ _append = chunks.append
+ begin = end - 1
+ while 1:
+ chunk = _m(s, end)
+ if chunk is None:
+ raise ValueError(
+ errmsg("Unterminated string starting at", s, begin))
+ end = chunk.end()
+ content, terminator = chunk.groups()
+ # Content contains zero or more unescaped string characters
+ if content:
+ if not isinstance(content, unicode):
+ content = unicode(content, encoding)
+ _append(content)
+ # Terminator is the end of string, a literal control character,
+ # or a backslash denoting that an escape sequence follows
+ if terminator == '"':
+ break
+ elif terminator != '\\':
+ if strict:
+ msg = "Invalid control character %r at" % (terminator,)
+ raise ValueError(msg, s, end)
+ else:
+ _append(terminator)
+ continue
+ try:
+ esc = s[end]
+ except IndexError:
+ raise ValueError(
+ errmsg("Unterminated string starting at", s, begin))
+ # If not a unicode escape sequence, must be in the lookup table
+ if esc != 'u':
+ try:
+ char = _b[esc]
+ except KeyError:
+ raise ValueError(
+ errmsg("Invalid \\escape: %r" % (esc,), s, end))
+ end += 1
+ else:
+ # Unicode escape sequence
+ esc = s[end + 1:end + 5]
+ next_end = end + 5
+ if len(esc) != 4:
+ msg = "Invalid \\uXXXX escape"
+ raise ValueError(errmsg(msg, s, end))
+ uni = int(esc, 16)
+ # Check for surrogate pair on UCS-4 systems
+ if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
+ msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
+ if not s[end + 5:end + 7] == '\\u':
+ raise ValueError(errmsg(msg, s, end))
+ esc2 = s[end + 7:end + 11]
+ if len(esc2) != 4:
+ raise ValueError(errmsg(msg, s, end))
+ uni2 = int(esc2, 16)
+ uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
+ next_end += 6
+ char = unichr(uni)
+ end = next_end
+ # Append the unescaped character
+ _append(char)
+ return u''.join(chunks), end
+
+
+# Use speedup if available
+scanstring = c_scanstring or py_scanstring
+
+WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
+WHITESPACE_STR = ' \t\n\r'
+
+def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
+ pairs = {}
+ # Use a slice to prevent IndexError from being raised, the following
+ # check will raise a more specific ValueError if the string is empty
+ nextchar = s[end:end + 1]
+ # Normally we expect nextchar == '"'
+ if nextchar != '"':
+ if nextchar in _ws:
+ end = _w(s, end).end()
+ nextchar = s[end:end + 1]
+ # Trivial empty object
+ if nextchar == '}':
+ return pairs, end + 1
+ elif nextchar != '"':
+ raise ValueError(errmsg("Expecting property name", s, end))
+ end += 1
+ while True:
+ key, end = scanstring(s, end, encoding, strict)
+
+ # To skip some function call overhead we optimize the fast paths where
+ # the JSON key separator is ": " or just ":".
+ if s[end:end + 1] != ':':
+ end = _w(s, end).end()
+ if s[end:end + 1] != ':':
+ raise ValueError(errmsg("Expecting : delimiter", s, end))
+
+ end += 1
+
+ try:
+ if s[end] in _ws:
+ end += 1
+ if s[end] in _ws:
+ end = _w(s, end + 1).end()
+ except IndexError:
+ pass
+
+ try:
+ value, end = scan_once(s, end)
+ except StopIteration:
+ raise ValueError(errmsg("Expecting object", s, end))
+ pairs[key] = value
+
+ try:
+ nextchar = s[end]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end]
+ except IndexError:
+ nextchar = ''
+ end += 1
+
+ if nextchar == '}':
+ break
+ elif nextchar != ',':
+ raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
+
+ try:
+ nextchar = s[end]
+ if nextchar in _ws:
+ end += 1
+ nextchar = s[end]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end]
+ except IndexError:
+ nextchar = ''
+
+ end += 1
+ if nextchar != '"':
+ raise ValueError(errmsg("Expecting property name", s, end - 1))
+
+ if object_hook is not None:
+ pairs = object_hook(pairs)
+ return pairs, end
+
+def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
+ values = []
+ nextchar = s[end:end + 1]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end:end + 1]
+ # Look-ahead for trivial empty array
+ if nextchar == ']':
+ return values, end + 1
+ _append = values.append
+ while True:
+ try:
+ value, end = scan_once(s, end)
+ except StopIteration:
+ raise ValueError(errmsg("Expecting object", s, end))
+ _append(value)
+ nextchar = s[end:end + 1]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end:end + 1]
+ end += 1
+ if nextchar == ']':
+ break
+ elif nextchar != ',':
+ raise ValueError(errmsg("Expecting , delimiter", s, end))
+
+ try:
+ if s[end] in _ws:
+ end += 1
+ if s[end] in _ws:
+ end = _w(s, end + 1).end()
+ except IndexError:
+ pass
+
+ return values, end
+
+class JSONDecoder(object):
+ """Simple JSON <http://json.org> decoder
+
+ Performs the following translations in decoding by default:
+
+ +---------------+-------------------+
+ | JSON | Python |
+ +===============+===================+
+ | object | dict |
+ +---------------+-------------------+
+ | array | list |
+ +---------------+-------------------+
+ | string | unicode |
+ +---------------+-------------------+
+ | number (int) | int, long |
+ +---------------+-------------------+
+ | number (real) | float |
+ +---------------+-------------------+
+ | true | True |
+ +---------------+-------------------+
+ | false | False |
+ +---------------+-------------------+
+ | null | None |
+ +---------------+-------------------+
+
+ It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
+ their corresponding ``float`` values, which is outside the JSON spec.
+
+ """
+
+ def __init__(self, encoding=None, object_hook=None, parse_float=None,
+ parse_int=None, parse_constant=None, strict=True):
+ """``encoding`` determines the encoding used to interpret any ``str``
+ objects decoded by this instance (utf-8 by default). It has no
+ effect when decoding ``unicode`` objects.
+
+ Note that currently only encodings that are a superset of ASCII work,
+ strings of other encodings should be passed in as ``unicode``.
+
+ ``object_hook``, if specified, will be called with the result
+ of every JSON object decoded and its return value will be used in
+ place of the given ``dict``. This can be used to provide custom
+ deserializations (e.g. to support JSON-RPC class hinting).
+
+ ``parse_float``, if specified, will be called with the string
+ of every JSON float to be decoded. By default this is equivalent to
+ float(num_str). This can be used to use another datatype or parser
+ for JSON floats (e.g. decimal.Decimal).
+
+ ``parse_int``, if specified, will be called with the string
+ of every JSON int to be decoded. By default this is equivalent to
+ int(num_str). This can be used to use another datatype or parser
+ for JSON integers (e.g. float).
+
+ ``parse_constant``, if specified, will be called with one of the
+ following strings: -Infinity, Infinity, NaN.
+ This can be used to raise an exception if invalid JSON numbers
+ are encountered.
+
+ """
+ self.encoding = encoding
+ self.object_hook = object_hook
+ self.parse_float = parse_float or float
+ self.parse_int = parse_int or int
+ self.parse_constant = parse_constant or _CONSTANTS.__getitem__
+ self.strict = strict
+ self.parse_object = JSONObject
+ self.parse_array = JSONArray
+ self.parse_string = scanstring
+ self.scan_once = make_scanner(self)
+
+ def decode(self, s, _w=WHITESPACE.match):
+ """Return the Python representation of ``s`` (a ``str`` or ``unicode``
+ instance containing a JSON document)
+
+ """
+ obj, end = self.raw_decode(s, idx=_w(s, 0).end())
+ end = _w(s, end).end()
+ if end != len(s):
+ raise ValueError(errmsg("Extra data", s, end, len(s)))
+ return obj
+
+ def raw_decode(self, s, idx=0):
+ """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
+ with a JSON document) and return a 2-tuple of the Python
+ representation and the index in ``s`` where the document ended.
+
+ This can be used to decode a JSON document from a string that may
+ have extraneous data at the end.
+
+ """
+ try:
+ obj, end = self.scan_once(s, idx)
+ except StopIteration:
+ raise ValueError("No JSON object could be decoded")
+ return obj, end
diff --git a/google/appengine/_internal/django/utils/simplejson/encoder.py b/google/appengine/_internal/django/utils/simplejson/encoder.py
new file mode 100644
index 0000000..06ebe62
--- /dev/null
+++ b/google/appengine/_internal/django/utils/simplejson/encoder.py
@@ -0,0 +1,430 @@
+"""Implementation of JSONEncoder
+"""
+import re
+
+c_encode_basestring_ascii = None
+c_make_encoder = None
+
+ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
+ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
+HAS_UTF8 = re.compile(r'[\x80-\xff]')
+ESCAPE_DCT = {
+ '\\': '\\\\',
+ '"': '\\"',
+ '\b': '\\b',
+ '\f': '\\f',
+ '\n': '\\n',
+ '\r': '\\r',
+ '\t': '\\t',
+}
+for i in range(0x20):
+ ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
+
+# Assume this produces an infinity on all machines (probably not guaranteed)
+INFINITY = float('1e66666')
+FLOAT_REPR = repr
+
+def encode_basestring(s):
+ """Return a JSON representation of a Python string
+
+ """
+ def replace(match):
+ return ESCAPE_DCT[match.group(0)]
+ return '"' + ESCAPE.sub(replace, s) + '"'
+
+
+def py_encode_basestring_ascii(s):
+ """Return an ASCII-only JSON representation of a Python string
+
+ """
+ if isinstance(s, str) and HAS_UTF8.search(s) is not None:
+ s = s.decode('utf-8')
+ def replace(match):
+ s = match.group(0)
+ try:
+ return ESCAPE_DCT[s]
+ except KeyError:
+ n = ord(s)
+ if n < 0x10000:
+ return '\\u%04x' % (n,)
+ else:
+ # surrogate pair
+ n -= 0x10000
+ s1 = 0xd800 | ((n >> 10) & 0x3ff)
+ s2 = 0xdc00 | (n & 0x3ff)
+ return '\\u%04x\\u%04x' % (s1, s2)
+ return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
+
+
+encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
+
+class JSONEncoder(object):
+ """Extensible JSON <http://json.org> encoder for Python data structures.
+
+ Supports the following objects and types by default:
+
+ +-------------------+---------------+
+ | Python | JSON |
+ +===================+===============+
+ | dict | object |
+ +-------------------+---------------+
+ | list, tuple | array |
+ +-------------------+---------------+
+ | str, unicode | string |
+ +-------------------+---------------+
+ | int, long, float | number |
+ +-------------------+---------------+
+ | True | true |
+ +-------------------+---------------+
+ | False | false |
+ +-------------------+---------------+
+ | None | null |
+ +-------------------+---------------+
+
+ To extend this to recognize other objects, subclass and implement a
+ ``.default()`` method with another method that returns a serializable
+ object for ``o`` if possible, otherwise it should call the superclass
+ implementation (to raise ``TypeError``).
+
+ """
+ item_separator = ', '
+ key_separator = ': '
+ def __init__(self, skipkeys=False, ensure_ascii=True,
+ check_circular=True, allow_nan=True, sort_keys=False,
+ indent=None, separators=None, encoding='utf-8', default=None):
+ """Constructor for JSONEncoder, with sensible defaults.
+
+ If skipkeys is False, then it is a TypeError to attempt
+ encoding of keys that are not str, int, long, float or None. If
+ skipkeys is True, such items are simply skipped.
+
+ If ensure_ascii is True, the output is guaranteed to be str
+ objects with all incoming unicode characters escaped. If
+ ensure_ascii is false, the output will be unicode object.
+
+ If check_circular is True, then lists, dicts, and custom encoded
+ objects will be checked for circular references during encoding to
+ prevent an infinite recursion (which would cause an OverflowError).
+ Otherwise, no such check takes place.
+
+ If allow_nan is True, then NaN, Infinity, and -Infinity will be
+ encoded as such. This behavior is not JSON specification compliant,
+ but is consistent with most JavaScript based encoders and decoders.
+ Otherwise, it will be a ValueError to encode such floats.
+
+ If sort_keys is True, then the output of dictionaries will be
+ sorted by key; this is useful for regression tests to ensure
+ that JSON serializations can be compared on a day-to-day basis.
+
+ If indent is a non-negative integer, then JSON array
+ elements and object members will be pretty-printed with that
+ indent level. An indent level of 0 will only insert newlines.
+ None is the most compact representation.
+
+ If specified, separators should be a (item_separator, key_separator)
+ tuple. The default is (', ', ': '). To get the most compact JSON
+ representation you should specify (',', ':') to eliminate whitespace.
+
+ If specified, default is a function that gets called for objects
+ that can't otherwise be serialized. It should return a JSON encodable
+ version of the object or raise a ``TypeError``.
+
+ If encoding is not None, then all input strings will be
+ transformed into unicode using that encoding prior to JSON-encoding.
+ The default is UTF-8.
+
+ """
+
+ self.skipkeys = skipkeys
+ self.ensure_ascii = ensure_ascii
+ self.check_circular = check_circular
+ self.allow_nan = allow_nan
+ self.sort_keys = sort_keys
+ self.indent = indent
+ if separators is not None:
+ self.item_separator, self.key_separator = separators
+ if default is not None:
+ self.default = default
+ self.encoding = encoding
+
+ def default(self, o):
+ """Implement this method in a subclass such that it returns
+ a serializable object for ``o``, or calls the base implementation
+ (to raise a ``TypeError``).
+
+ For example, to support arbitrary iterators, you could
+ implement default like this::
+
+ def default(self, o):
+ try:
+ iterable = iter(o)
+ except TypeError:
+ pass
+ else:
+ return list(iterable)
+ return JSONEncoder.default(self, o)
+
+ """
+ raise TypeError("%r is not JSON serializable" % (o,))
+
+ def encode(self, o):
+ """Return a JSON string representation of a Python data structure.
+
+ >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
+ '{"foo": ["bar", "baz"]}'
+
+ """
+ # This is for extremely simple cases and benchmarks.
+ if isinstance(o, basestring):
+ if isinstance(o, str):
+ _encoding = self.encoding
+ if (_encoding is not None
+ and not (_encoding == 'utf-8')):
+ o = o.decode(_encoding)
+ if self.ensure_ascii:
+ return encode_basestring_ascii(o)
+ else:
+ return encode_basestring(o)
+ # This doesn't pass the iterator directly to ''.join() because the
+ # exceptions aren't as detailed. The list call should be roughly
+ # equivalent to the PySequence_Fast that ''.join() would do.
+ chunks = self.iterencode(o, _one_shot=True)
+ if not isinstance(chunks, (list, tuple)):
+ chunks = list(chunks)
+ return ''.join(chunks)
+
+ def iterencode(self, o, _one_shot=False):
+ """Encode the given object and yield each string
+ representation as available.
+
+ For example::
+
+ for chunk in JSONEncoder().iterencode(bigobject):
+ mysocket.write(chunk)
+
+ """
+ if self.check_circular:
+ markers = {}
+ else:
+ markers = None
+ if self.ensure_ascii:
+ _encoder = encode_basestring_ascii
+ else:
+ _encoder = encode_basestring
+ if self.encoding != 'utf-8':
+ def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
+ if isinstance(o, str):
+ o = o.decode(_encoding)
+ return _orig_encoder(o)
+
+ def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
+ # Check for specials. Note that this type of test is processor- and/or
+ # platform-specific, so do tests which don't depend on the internals.
+
+ if o != o:
+ text = 'NaN'
+ elif o == _inf:
+ text = 'Infinity'
+ elif o == _neginf:
+ text = '-Infinity'
+ else:
+ return _repr(o)
+
+ if not allow_nan:
+ raise ValueError("Out of range float values are not JSON compliant: %r"
+ % (o,))
+
+ return text
+
+
+ if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
+ _iterencode = c_make_encoder(
+ markers, self.default, _encoder, self.indent,
+ self.key_separator, self.item_separator, self.sort_keys,
+ self.skipkeys, self.allow_nan)
+ else:
+ _iterencode = _make_iterencode(
+ markers, self.default, _encoder, self.indent, floatstr,
+ self.key_separator, self.item_separator, self.sort_keys,
+ self.skipkeys, _one_shot)
+ return _iterencode(o, 0)
+
+def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
+ ## HACK: hand-optimized bytecode; turn globals into locals
+ False=False,
+ True=True,
+ ValueError=ValueError,
+ basestring=basestring,
+ dict=dict,
+ float=float,
+ id=id,
+ int=int,
+ isinstance=isinstance,
+ list=list,
+ long=long,
+ str=str,
+ tuple=tuple,
+ ):
+
+ def _iterencode_list(lst, _current_indent_level):
+ if not lst:
+ yield '[]'
+ return
+ if markers is not None:
+ markerid = id(lst)
+ if markerid in markers:
+ raise ValueError("Circular reference detected")
+ markers[markerid] = lst
+ buf = '['
+ if _indent is not None:
+ _current_indent_level += 1
+ newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
+ separator = _item_separator + newline_indent
+ buf += newline_indent
+ else:
+ newline_indent = None
+ separator = _item_separator
+ first = True
+ for value in lst:
+ if first:
+ first = False
+ else:
+ buf = separator
+ if isinstance(value, basestring):
+ yield buf + _encoder(value)
+ elif value is None:
+ yield buf + 'null'
+ elif value is True:
+ yield buf + 'true'
+ elif value is False:
+ yield buf + 'false'
+ elif isinstance(value, (int, long)):
+ yield buf + str(value)
+ elif isinstance(value, float):
+ yield buf + _floatstr(value)
+ else:
+ yield buf
+ if isinstance(value, (list, tuple)):
+ chunks = _iterencode_list(value, _current_indent_level)
+ elif isinstance(value, dict):
+ chunks = _iterencode_dict(value, _current_indent_level)
+ else:
+ chunks = _iterencode(value, _current_indent_level)
+ for chunk in chunks:
+ yield chunk
+ if newline_indent is not None:
+ _current_indent_level -= 1
+ yield '\n' + (' ' * (_indent * _current_indent_level))
+ yield ']'
+ if markers is not None:
+ del markers[markerid]
+
+ def _iterencode_dict(dct, _current_indent_level):
+ if not dct:
+ yield '{}'
+ return
+ if markers is not None:
+ markerid = id(dct)
+ if markerid in markers:
+ raise ValueError("Circular reference detected")
+ markers[markerid] = dct
+ yield '{'
+ if _indent is not None:
+ _current_indent_level += 1
+ newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
+ item_separator = _item_separator + newline_indent
+ yield newline_indent
+ else:
+ newline_indent = None
+ item_separator = _item_separator
+ first = True
+ if _sort_keys:
+ items = dct.items()
+ items.sort(key=lambda kv: kv[0])
+ else:
+ items = dct.iteritems()
+ for key, value in items:
+ if isinstance(key, basestring):
+ pass
+ # JavaScript is weakly typed for these, so it makes sense to
+ # also allow them. Many encoders seem to do something like this.
+ elif isinstance(key, float):
+ key = _floatstr(key)
+ elif isinstance(key, (int, long)):
+ key = str(key)
+ elif key is True:
+ key = 'true'
+ elif key is False:
+ key = 'false'
+ elif key is None:
+ key = 'null'
+ elif _skipkeys:
+ continue
+ else:
+ raise TypeError("key %r is not a string" % (key,))
+ if first:
+ first = False
+ else:
+ yield item_separator
+ yield _encoder(key)
+ yield _key_separator
+ if isinstance(value, basestring):
+ yield _encoder(value)
+ elif value is None:
+ yield 'null'
+ elif value is True:
+ yield 'true'
+ elif value is False:
+ yield 'false'
+ elif isinstance(value, (int, long)):
+ yield str(value)
+ elif isinstance(value, float):
+ yield _floatstr(value)
+ else:
+ if isinstance(value, (list, tuple)):
+ chunks = _iterencode_list(value, _current_indent_level)
+ elif isinstance(value, dict):
+ chunks = _iterencode_dict(value, _current_indent_level)
+ else:
+ chunks = _iterencode(value, _current_indent_level)
+ for chunk in chunks:
+ yield chunk
+ if newline_indent is not None:
+ _current_indent_level -= 1
+ yield '\n' + (' ' * (_indent * _current_indent_level))
+ yield '}'
+ if markers is not None:
+ del markers[markerid]
+
+ def _iterencode(o, _current_indent_level):
+ if isinstance(o, basestring):
+ yield _encoder(o)
+ elif o is None:
+ yield 'null'
+ elif o is True:
+ yield 'true'
+ elif o is False:
+ yield 'false'
+ elif isinstance(o, (int, long)):
+ yield str(o)
+ elif isinstance(o, float):
+ yield _floatstr(o)
+ elif isinstance(o, (list, tuple)):
+ for chunk in _iterencode_list(o, _current_indent_level):
+ yield chunk
+ elif isinstance(o, dict):
+ for chunk in _iterencode_dict(o, _current_indent_level):
+ yield chunk
+ else:
+ if markers is not None:
+ markerid = id(o)
+ if markerid in markers:
+ raise ValueError("Circular reference detected")
+ markers[markerid] = o
+ o = _default(o)
+ for chunk in _iterencode(o, _current_indent_level):
+ yield chunk
+ if markers is not None:
+ del markers[markerid]
+
+ return _iterencode
diff --git a/google/appengine/_internal/django/utils/simplejson/scanner.py b/google/appengine/_internal/django/utils/simplejson/scanner.py
new file mode 100644
index 0000000..adbc6ec
--- /dev/null
+++ b/google/appengine/_internal/django/utils/simplejson/scanner.py
@@ -0,0 +1,65 @@
+"""JSON token scanner
+"""
+import re
+try:
+ from simplejson._speedups import make_scanner as c_make_scanner
+except ImportError:
+ c_make_scanner = None
+
+__all__ = ['make_scanner']
+
+NUMBER_RE = re.compile(
+ r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
+ (re.VERBOSE | re.MULTILINE | re.DOTALL))
+
+def py_make_scanner(context):
+ parse_object = context.parse_object
+ parse_array = context.parse_array
+ parse_string = context.parse_string
+ match_number = NUMBER_RE.match
+ encoding = context.encoding
+ strict = context.strict
+ parse_float = context.parse_float
+ parse_int = context.parse_int
+ parse_constant = context.parse_constant
+ object_hook = context.object_hook
+
+ def _scan_once(string, idx):
+ try:
+ nextchar = string[idx]
+ except IndexError:
+ raise StopIteration
+
+ if nextchar == '"':
+ return parse_string(string, idx + 1, encoding, strict)
+ elif nextchar == '{':
+ return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
+ elif nextchar == '[':
+ return parse_array((string, idx + 1), _scan_once)
+ elif nextchar == 'n' and string[idx:idx + 4] == 'null':
+ return None, idx + 4
+ elif nextchar == 't' and string[idx:idx + 4] == 'true':
+ return True, idx + 4
+ elif nextchar == 'f' and string[idx:idx + 5] == 'false':
+ return False, idx + 5
+
+ m = match_number(string, idx)
+ if m is not None:
+ integer, frac, exp = m.groups()
+ if frac or exp:
+ res = parse_float(integer + (frac or '') + (exp or ''))
+ else:
+ res = parse_int(integer)
+ return res, m.end()
+ elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
+ return parse_constant('NaN'), idx + 3
+ elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
+ return parse_constant('Infinity'), idx + 8
+ elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
+ return parse_constant('-Infinity'), idx + 9
+ else:
+ raise StopIteration
+
+ return _scan_once
+
+make_scanner = c_make_scanner or py_make_scanner
diff --git a/google/appengine/_internal/django/utils/simplejson/tool.py b/google/appengine/_internal/django/utils/simplejson/tool.py
new file mode 100644
index 0000000..ef4149e
--- /dev/null
+++ b/google/appengine/_internal/django/utils/simplejson/tool.py
@@ -0,0 +1,35 @@
+r"""Using simplejson from the shell to validate and
+pretty-print::
+
+ $ echo '{"json":"obj"}' | python -msimplejson.tool
+ {
+ "json": "obj"
+ }
+ $ echo '{ 1.2:3.4}' | python -msimplejson.tool
+ Expecting property name: line 1 column 2 (char 2)
+"""
+from google.appengine._internal.django.utils import simplejson
+
+def main():
+ import sys
+ if len(sys.argv) == 1:
+ infile = sys.stdin
+ outfile = sys.stdout
+ elif len(sys.argv) == 2:
+ infile = open(sys.argv[1], 'rb')
+ outfile = sys.stdout
+ elif len(sys.argv) == 3:
+ infile = open(sys.argv[1], 'rb')
+ outfile = open(sys.argv[2], 'wb')
+ else:
+ raise SystemExit("%s [infile [outfile]]" % (sys.argv[0],))
+ try:
+ obj = simplejson.load(infile)
+ except ValueError, e:
+ raise SystemExit(e)
+ simplejson.dump(obj, outfile, sort_keys=True, indent=4)
+ outfile.write('\n')
+
+
+if __name__ == '__main__':
+ main()
diff --git a/google/appengine/_internal/django/utils/stopwords.py b/google/appengine/_internal/django/utils/stopwords.py
new file mode 100644
index 0000000..18aeb7f
--- /dev/null
+++ b/google/appengine/_internal/django/utils/stopwords.py
@@ -0,0 +1,42 @@
+# Performance note: I benchmarked this code using a set instead of
+# a list for the stopwords and was surprised to find that the list
+# performed /better/ than the set - maybe because it's only a small
+# list.
+
+stopwords = '''
+i
+a
+an
+are
+as
+at
+be
+by
+for
+from
+how
+in
+is
+it
+of
+on
+or
+that
+the
+this
+to
+was
+what
+when
+where
+'''.split()
+
+def strip_stopwords(sentence):
+ "Removes stopwords - also normalizes whitespace"
+ words = sentence.split()
+ sentence = []
+ for word in words:
+ if word.lower() not in stopwords:
+ sentence.append(word)
+ return u' '.join(sentence)
+
diff --git a/google/appengine/_internal/django/utils/synch.py b/google/appengine/_internal/django/utils/synch.py
new file mode 100644
index 0000000..2e808c1
--- /dev/null
+++ b/google/appengine/_internal/django/utils/synch.py
@@ -0,0 +1,87 @@
+"""
+Synchronization primitives:
+
+ - reader-writer lock (preference to writers)
+
+(Contributed to Django by eugene@lazutkin.com)
+"""
+
+try:
+ import threading
+except ImportError:
+ import dummy_threading as threading
+
+class RWLock:
+ """
+ Classic implementation of reader-writer lock with preference to writers.
+
+ Readers can access a resource simultaneously.
+ Writers get an exclusive access.
+
+ API is self-descriptive:
+ reader_enters()
+ reader_leaves()
+ writer_enters()
+ writer_leaves()
+ """
+ def __init__(self):
+ self.mutex = threading.RLock()
+ self.can_read = threading.Semaphore(0)
+ self.can_write = threading.Semaphore(0)
+ self.active_readers = 0
+ self.active_writers = 0
+ self.waiting_readers = 0
+ self.waiting_writers = 0
+
+ def reader_enters(self):
+ self.mutex.acquire()
+ try:
+ if self.active_writers == 0 and self.waiting_writers == 0:
+ self.active_readers += 1
+ self.can_read.release()
+ else:
+ self.waiting_readers += 1
+ finally:
+ self.mutex.release()
+ self.can_read.acquire()
+
+ def reader_leaves(self):
+ self.mutex.acquire()
+ try:
+ self.active_readers -= 1
+ if self.active_readers == 0 and self.waiting_writers != 0:
+ self.active_writers += 1
+ self.waiting_writers -= 1
+ self.can_write.release()
+ finally:
+ self.mutex.release()
+
+ def writer_enters(self):
+ self.mutex.acquire()
+ try:
+ if self.active_writers == 0 and self.waiting_writers == 0 and self.active_readers == 0:
+ self.active_writers += 1
+ self.can_write.release()
+ else:
+ self.waiting_writers += 1
+ finally:
+ self.mutex.release()
+ self.can_write.acquire()
+
+ def writer_leaves(self):
+ self.mutex.acquire()
+ try:
+ self.active_writers -= 1
+ if self.waiting_writers != 0:
+ self.active_writers += 1
+ self.waiting_writers -= 1
+ self.can_write.release()
+ elif self.waiting_readers != 0:
+ t = self.waiting_readers
+ self.waiting_readers = 0
+ self.active_readers += t
+ while t > 0:
+ self.can_read.release()
+ t -= 1
+ finally:
+ self.mutex.release()
diff --git a/google/appengine/_internal/django/utils/termcolors.py b/google/appengine/_internal/django/utils/termcolors.py
new file mode 100644
index 0000000..4fb64ac
--- /dev/null
+++ b/google/appengine/_internal/django/utils/termcolors.py
@@ -0,0 +1,198 @@
+"""
+termcolors.py
+"""
+
+color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white')
+foreground = dict([(color_names[x], '3%s' % x) for x in range(8)])
+background = dict([(color_names[x], '4%s' % x) for x in range(8)])
+
+RESET = '0'
+opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'}
+
+def colorize(text='', opts=(), **kwargs):
+ """
+ Returns your text, enclosed in ANSI graphics codes.
+
+ Depends on the keyword arguments 'fg' and 'bg', and the contents of
+ the opts tuple/list.
+
+ Returns the RESET code if no parameters are given.
+
+ Valid colors:
+ 'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
+
+ Valid options:
+ 'bold'
+ 'underscore'
+ 'blink'
+ 'reverse'
+ 'conceal'
+ 'noreset' - string will not be auto-terminated with the RESET code
+
+ Examples:
+ colorize('hello', fg='red', bg='blue', opts=('blink',))
+ colorize()
+ colorize('goodbye', opts=('underscore',))
+ print colorize('first line', fg='red', opts=('noreset',))
+ print 'this should be red too'
+ print colorize('and so should this')
+ print 'this should not be red'
+ """
+ code_list = []
+ if text == '' and len(opts) == 1 and opts[0] == 'reset':
+ return '\x1b[%sm' % RESET
+ for k, v in kwargs.iteritems():
+ if k == 'fg':
+ code_list.append(foreground[v])
+ elif k == 'bg':
+ code_list.append(background[v])
+ for o in opts:
+ if o in opt_dict:
+ code_list.append(opt_dict[o])
+ if 'noreset' not in opts:
+ text = text + '\x1b[%sm' % RESET
+ return ('\x1b[%sm' % ';'.join(code_list)) + text
+
+def make_style(opts=(), **kwargs):
+ """
+ Returns a function with default parameters for colorize()
+
+ Example:
+ bold_red = make_style(opts=('bold',), fg='red')
+ print bold_red('hello')
+ KEYWORD = make_style(fg='yellow')
+ COMMENT = make_style(fg='blue', opts=('bold',))
+ """
+ return lambda text: colorize(text, opts, **kwargs)
+
+NOCOLOR_PALETTE = 'nocolor'
+DARK_PALETTE = 'dark'
+LIGHT_PALETTE = 'light'
+
+PALETTES = {
+ NOCOLOR_PALETTE: {
+ 'ERROR': {},
+ 'NOTICE': {},
+ 'SQL_FIELD': {},
+ 'SQL_COLTYPE': {},
+ 'SQL_KEYWORD': {},
+ 'SQL_TABLE': {},
+ 'HTTP_INFO': {},
+ 'HTTP_SUCCESS': {},
+ 'HTTP_REDIRECT': {},
+ 'HTTP_NOT_MODIFIED': {},
+ 'HTTP_BAD_REQUEST': {},
+ 'HTTP_NOT_FOUND': {},
+ 'HTTP_SERVER_ERROR': {},
+ },
+ DARK_PALETTE: {
+ 'ERROR': { 'fg': 'red', 'opts': ('bold',) },
+ 'NOTICE': { 'fg': 'red' },
+ 'SQL_FIELD': { 'fg': 'green', 'opts': ('bold',) },
+ 'SQL_COLTYPE': { 'fg': 'green' },
+ 'SQL_KEYWORD': { 'fg': 'yellow' },
+ 'SQL_TABLE': { 'opts': ('bold',) },
+ 'HTTP_INFO': { 'opts': ('bold',) },
+ 'HTTP_SUCCESS': { },
+ 'HTTP_REDIRECT': { 'fg': 'green' },
+ 'HTTP_NOT_MODIFIED': { 'fg': 'cyan' },
+ 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) },
+ 'HTTP_NOT_FOUND': { 'fg': 'yellow' },
+ 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) },
+ },
+ LIGHT_PALETTE: {
+ 'ERROR': { 'fg': 'red', 'opts': ('bold',) },
+ 'NOTICE': { 'fg': 'red' },
+ 'SQL_FIELD': { 'fg': 'green', 'opts': ('bold',) },
+ 'SQL_COLTYPE': { 'fg': 'green' },
+ 'SQL_KEYWORD': { 'fg': 'blue' },
+ 'SQL_TABLE': { 'opts': ('bold',) },
+ 'HTTP_INFO': { 'opts': ('bold',) },
+ 'HTTP_SUCCESS': { },
+ 'HTTP_REDIRECT': { 'fg': 'green', 'opts': ('bold',) },
+ 'HTTP_NOT_MODIFIED': { 'fg': 'green' },
+ 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) },
+ 'HTTP_NOT_FOUND': { 'fg': 'red' },
+ 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) },
+ }
+}
+DEFAULT_PALETTE = DARK_PALETTE
+
+def parse_color_setting(config_string):
+ """Parse a DJANGO_COLORS environment variable to produce the system palette
+
+ The general form of a palette definition is:
+
+ "palette;role=fg;role=fg/bg;role=fg,option,option;role=fg/bg,option,option"
+
+ where:
+ palette is a named palette; one of 'light', 'dark', or 'nocolor'.
+ role is a named style used by Django
+ fg is a foreground color.
+ bg is a background color.
+ option is a display option.
+
+ Specifying a named palette is the same as manually specifying the individual
+ definitions for each role. Any individual definitions following the palette
+ definition will augment the base palette definition.
+
+ Valid roles:
+ 'error', 'notice', 'sql_field', 'sql_coltype', 'sql_keyword', 'sql_table',
+ 'http_info', 'http_success', 'http_redirect', 'http_bad_request',
+ 'http_not_found', 'http_server_error'
+
+ Valid colors:
+ 'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
+
+ Valid options:
+ 'bold', 'underscore', 'blink', 'reverse', 'conceal'
+
+ """
+ if not config_string:
+ return PALETTES[DEFAULT_PALETTE]
+
+ # Split the color configuration into parts
+ parts = config_string.lower().split(';')
+ palette = PALETTES[NOCOLOR_PALETTE].copy()
+ for part in parts:
+ if part in PALETTES:
+ # A default palette has been specified
+ palette.update(PALETTES[part])
+ elif '=' in part:
+ # Process a palette defining string
+ definition = {}
+
+ # Break the definition into the role,
+ # plus the list of specific instructions.
+ # The role must be in upper case
+ role, instructions = part.split('=')
+ role = role.upper()
+
+ styles = instructions.split(',')
+ styles.reverse()
+
+ # The first instruction can contain a slash
+ # to break apart fg/bg.
+ colors = styles.pop().split('/')
+ colors.reverse()
+ fg = colors.pop()
+ if fg in color_names:
+ definition['fg'] = fg
+ if colors and colors[-1] in color_names:
+ definition['bg'] = colors[-1]
+
+ # All remaining instructions are options
+ opts = tuple(s for s in styles if s in opt_dict.keys())
+ if opts:
+ definition['opts'] = opts
+
+ # The nocolor palette has all available roles.
+ # Use that palette as the basis for determining
+ # if the role is valid.
+ if role in PALETTES[NOCOLOR_PALETTE] and definition:
+ palette[role] = definition
+
+ # If there are no colors specified, return the empty palette.
+ if palette == PALETTES[NOCOLOR_PALETTE]:
+ return None
+ return palette
diff --git a/google/appengine/_internal/django/utils/text.py b/google/appengine/_internal/django/utils/text.py
new file mode 100644
index 0000000..6b6a8ea
--- /dev/null
+++ b/google/appengine/_internal/django/utils/text.py
@@ -0,0 +1,282 @@
+import re
+from google.appengine._internal.django.utils.encoding import force_unicode
+from google.appengine._internal.django.utils.functional import allow_lazy
+from google.appengine._internal.django.utils.translation import ugettext_lazy
+from htmlentitydefs import name2codepoint
+
# Capitalizes the first letter of a string; falsy inputs ('' or None) are
# returned unchanged. Wrapped with allow_lazy so it also works on lazy strings.
capfirst = lambda x: x and force_unicode(x)[0].upper() + force_unicode(x)[1:]
capfirst = allow_lazy(capfirst, unicode)
+
def wrap(text, width):
    """
    A word-wrap function that preserves existing line breaks and most spaces in
    the text. Expects that existing line breaks are posix newlines.

    Words longer than 'width' are not split, so a line may exceed 'width'.
    Returns a unicode string.
    """
    text = force_unicode(text)
    def _generator():
        it = iter(text.split(' '))
        word = it.next()
        yield word
        # 'pos' tracks the length of the current output line; start it after
        # the last newline embedded in the first word (if any).
        pos = len(word) - word.rfind('\n') - 1
        for word in it:
            if "\n" in word:
                lines = word.split('\n')
            else:
                lines = (word,)
            # Length the current line would reach with this word appended
            # (+1 for the separating space).
            pos += len(lines[0]) + 1
            if pos > width:
                # Break the line instead of emitting the space.
                yield '\n'
                pos = len(lines[-1])
            else:
                yield ' '
                if len(lines) > 1:
                    # The word itself contains newlines; restart the count
                    # from the text after its last newline.
                    pos = len(lines[-1])
            yield word
    return u''.join(_generator())
wrap = allow_lazy(wrap, unicode)
+
def truncate_words(s, num, end_text='...'):
    """Truncates a string after a certain number of words. Takes an optional
    argument of what should be used to notify that the string has been
    truncated, defaulting to ellipsis (...)

    Newlines in the string will be stripped.
    """
    s = force_unicode(s)
    limit = int(num)
    pieces = s.split()
    if len(pieces) <= limit:
        # Short enough: return the whitespace-normalized text unchanged.
        return u' '.join(pieces)
    pieces = pieces[:limit]
    # Avoid doubling the marker if the last kept word already ends with it.
    if not pieces[-1].endswith(end_text):
        pieces.append(end_text)
    return u' '.join(pieces)
truncate_words = allow_lazy(truncate_words, unicode)
+
def truncate_html_words(s, num, end_text='...'):
    """Truncates HTML to a certain number of words (not counting tags and
    comments). Closes opened tags if they were correctly closed in the given
    html. Takes an optional argument of what should be used to notify that the
    string has been truncated, defaulting to ellipsis (...).

    Newlines in the HTML are preserved.
    """
    s = force_unicode(s)
    length = int(num)
    if length <= 0:
        return u''
    # Tags that are void in HTML4 and therefore never need a closing tag.
    html4_singlets = ('br', 'col', 'link', 'base', 'img', 'param', 'area', 'hr', 'input')
    # Set up regular expressions: entity | tag | (word) — only the word is
    # captured, so group(1) distinguishes words from markup.
    re_words = re.compile(r'&.*?;|<.*?>|(\w[\w-]*)', re.U)
    # Groups: (closing slash?, tag name, self-closing slash?).
    re_tag = re.compile(r'<(/)?([^ ]+?)(?: (/)| .*?)?>')
    # Count non-HTML words and keep note of open tags
    pos = 0            # scan position within s
    end_text_pos = 0   # where the truncated output ends (0 = not reached yet)
    words = 0
    open_tags = []     # most recently opened tag first
    while words <= length:
        m = re_words.search(s, pos)
        if not m:
            # Checked through whole string
            break
        pos = m.end(0)
        if m.group(1):
            # It's an actual non-HTML word
            words += 1
            if words == length:
                end_text_pos = pos
            continue
        # Check for tag
        tag = re_tag.match(m.group(0))
        if not tag or end_text_pos:
            # Don't worry about non tags or tags after our truncate point
            continue
        closing_tag, tagname, self_closing = tag.groups()
        tagname = tagname.lower()  # Element names are always case-insensitive
        if self_closing or tagname in html4_singlets:
            pass
        elif closing_tag:
            # Check for match in open tags list
            try:
                i = open_tags.index(tagname)
            except ValueError:
                pass
            else:
                # SGML: An end tag closes, back to the matching start tag, all
                # unclosed intervening start tags with omitted end tags
                open_tags = open_tags[i+1:]
        else:
            # Add it to the start of the open tags list
            open_tags.insert(0, tagname)
    if words <= length:
        # Don't try to close tags if we don't need to truncate
        return s
    out = s[:end_text_pos]
    if end_text:
        out += ' ' + end_text
    # Close any tags still open
    for tag in open_tags:
        out += '</%s>' % tag
    # Return string
    return out
truncate_html_words = allow_lazy(truncate_html_words, unicode)
+
def get_valid_filename(s):
    """
    Returns the given string converted to a string that can be used for a clean
    filename. Specifically, leading and trailing spaces are removed; other
    spaces are converted to underscores; and anything that is not a unicode
    alphanumeric, dash, underscore, or dot, is removed.
    >>> get_valid_filename("john's portrait in 2004.jpg")
    u'johns_portrait_in_2004.jpg'
    """
    cleaned = force_unicode(s).strip().replace(' ', '_')
    # Drop everything that isn't a word char, dash or dot (unicode-aware).
    return re.sub(r'(?u)[^-\w.]', '', cleaned)
get_valid_filename = allow_lazy(get_valid_filename, unicode)
+
def get_text_list(list_, last_word=ugettext_lazy(u'or')):
    """
    Joins items with commas, using 'last_word' before the final item.

    >>> get_text_list(['a', 'b', 'c', 'd'])
    u'a, b, c or d'
    >>> get_text_list(['a', 'b', 'c'], 'and')
    u'a, b and c'
    >>> get_text_list(['a', 'b'], 'and')
    u'a and b'
    >>> get_text_list(['a'])
    u'a'
    >>> get_text_list([])
    u''
    """
    if not list_:
        return u''
    if len(list_) == 1:
        return force_unicode(list_[0])
    head = ', '.join([force_unicode(item) for item in list_[:-1]])
    return u'%s %s %s' % (head, force_unicode(last_word), force_unicode(list_[-1]))
get_text_list = allow_lazy(get_text_list, unicode)
+
# Any of the three newline conventions, normalized to posix '\n'.
_newline_re = re.compile(r'\r\n|\r|\n')

def normalize_newlines(text):
    "Normalizes CRLF and CR line endings in 'text' to LF."
    return force_unicode(_newline_re.sub('\n', text))
normalize_newlines = allow_lazy(normalize_newlines, unicode)
+
# Matches the first lowercase letter of the string and of every sentence
# (i.e. following ". ", "? " or "! "). Compiled once at import time instead
# of on every call.
_caps_re = re.compile(r'(?:^|(?<=[\.\?\!] ))([a-z])')

def recapitalize(text):
    """Recapitalizes text, placing caps after end-of-sentence punctuation.

    The input is lower-cased first, then the initial letter of the string
    and of each sentence is upper-cased.
    """
    text = force_unicode(text).lower()
    return _caps_re.sub(lambda m: m.group(1).upper(), text)
recapitalize = allow_lazy(recapitalize)
+
# Standard telephone keypad mapping from letters to digits.
_PHONE_DIGITS = {
    'a': '2', 'b': '2', 'c': '2',
    'd': '3', 'e': '3', 'f': '3',
    'g': '4', 'h': '4', 'i': '4',
    'j': '5', 'k': '5', 'l': '5',
    'm': '6', 'n': '6', 'o': '6',
    'p': '7', 'q': '7', 'r': '7', 's': '7',
    't': '8', 'u': '8', 'v': '8',
    'w': '9', 'x': '9', 'y': '9', 'z': '9',
}

def phone2numeric(phone):
    "Converts a phone number with letters into its numeric equivalent."
    letter_re = re.compile(r'[A-Z]', re.I)
    return letter_re.sub(lambda m: _PHONE_DIGITS.get(m.group(0).lower()), phone)
phone2numeric = allow_lazy(phone2numeric)
+
# From http://www.xhaus.com/alan/python/httpcomp.html#gzip
# Used with permission.
def compress_string(s):
    "Returns the gzip-compressed form of the bytestring 's'."
    import cStringIO, gzip
    sink = cStringIO.StringIO()
    compressor = gzip.GzipFile(mode='wb', compresslevel=6, fileobj=sink)
    compressor.write(s)
    compressor.close()  # flushes the gzip trailer into 'sink'
    return sink.getvalue()
+
ustring_re = re.compile(u"([\u0080-\uffff])")

def javascript_quote(s, quote_double_quotes=False):
    """Escapes 's' for safe embedding in a JavaScript string literal.

    Backslashes, CR, LF, TAB and single quotes are backslash-escaped, and all
    non-ASCII characters are replaced with \\uXXXX escapes. If
    'quote_double_quotes' is True, double quotes are replaced with the HTML
    entity &quot; (for literals embedded in HTML attributes).

    Accepts a UTF-8 bytestring or a unicode string; raises TypeError for
    anything else.
    """

    def fix(match):
        return r"\u%04x" % ord(match.group(1))

    if type(s) == str:
        s = s.decode('utf-8')
    elif type(s) != unicode:
        raise TypeError(s)
    s = s.replace('\\', '\\\\')
    s = s.replace('\r', '\\r')
    s = s.replace('\n', '\\n')
    s = s.replace('\t', '\\t')
    s = s.replace("'", "\\'")
    if quote_double_quotes:
        # Bug fix: this previously replaced '"' with itself (a no-op), so
        # double quotes were never escaped. Upstream Django substitutes the
        # HTML entity here.
        s = s.replace('"', '&quot;')
    return str(ustring_re.sub(fix, s))
javascript_quote = allow_lazy(javascript_quote, unicode)
+
# Expression to match some_token and some_token="with spaces" (and similarly
# for single-quoted strings). A token is a run of non-space characters that
# may embed one or more quoted sections (with backslash-escaped quotes), or
# failing that, any run of non-whitespace.
smart_split_re = re.compile(r"""
    ((?:
        [^\s'"]*
        (?:
            (?:"(?:[^"\\]|\\.)*" | '(?:[^'\\]|\\.)*')
            [^\s'"]*
        )+
    ) | \S+)
""", re.VERBOSE)
+
def smart_split(text):
    r"""
    Generator that splits a string by spaces, leaving quoted phrases together.
    Supports both single and double quotes, and supports escaping quotes with
    backslashes. In the output, strings will keep their initial and trailing
    quote marks and escaped quotes will remain escaped (the results can then
    be further processed with unescape_string_literal()).

    >>> list(smart_split(r'This is "a person\'s" test.'))
    [u'This', u'is', u'"a person\\\'s"', u'test.']
    >>> list(smart_split(r"Another 'person\'s' test."))
    [u'Another', u"'person\\'s'", u'test.']
    >>> list(smart_split(r'A "\"funky\" style" test.'))
    [u'A', u'"\\"funky\\" style"', u'test.']
    """
    # All the heavy lifting is in smart_split_re; each match is one token.
    for match in smart_split_re.finditer(force_unicode(text)):
        yield match.group(0)
smart_split = allow_lazy(smart_split, unicode)
+
def _replace_entity(match):
    """Returns the character for one matched HTML entity, or the matched
    text unchanged when the entity is malformed or unknown."""
    ent = match.group(1)
    if ent[0] != u'#':
        # Named entity, e.g. &amp;
        try:
            return unichr(name2codepoint[ent])
        except (ValueError, KeyError):
            return match.group(0)
    # Numeric entity: decimal (&#65;) or hexadecimal (&#x41; / &#X41;).
    ent = ent[1:]
    try:
        if ent[0] in u'xX':
            codepoint = int(ent[1:], 16)
        else:
            codepoint = int(ent)
        return unichr(codepoint)
    except ValueError:
        return match.group(0)

_entity_re = re.compile(r"&(#?[xX]?(?:[0-9a-fA-F]+|\w{1,8}));")

def unescape_entities(text):
    "Replaces all HTML entities in 'text' with their unicode characters."
    return _entity_re.sub(_replace_entity, text)
unescape_entities = allow_lazy(unescape_entities, unicode)
+
def unescape_string_literal(s):
    r"""
    Convert quoted string literals to unquoted strings with escaped quotes and
    backslashes unquoted::

        >>> unescape_string_literal('"abc"')
        'abc'
        >>> unescape_string_literal("'abc'")
        'abc'
        >>> unescape_string_literal('"a \"bc\""')
        'a "bc"'
        >>> unescape_string_literal("'\'ab\' c'")
        "'ab' c"
    """
    quote = s[0]
    if quote not in "\"'" or s[-1] != quote:
        raise ValueError("Not a string literal: %r" % s)
    body = s[1:-1]
    # Unescape the quote character first, then literal backslashes.
    return body.replace('\\' + quote, quote).replace(r'\\', '\\')
unescape_string_literal = allow_lazy(unescape_string_literal)
diff --git a/google/appengine/_internal/django/utils/thread_support.py b/google/appengine/_internal/django/utils/thread_support.py
new file mode 100644
index 0000000..2b97d16
--- /dev/null
+++ b/google/appengine/_internal/django/utils/thread_support.py
@@ -0,0 +1,12 @@
+"""
+Code used in a couple of places to work with the current thread's environment.
+Current users include i18n and request prefix handling.
+"""
+
try:
    import threading
    currentThread = threading.currentThread
except ImportError:
    # Runtimes without thread support: fall back to a constant marker so
    # dictionaries keyed by currentThread() still work (single "thread").
    def currentThread():
        return "no threading"
+
diff --git a/google/appengine/_internal/django/utils/timesince.py b/google/appengine/_internal/django/utils/timesince.py
new file mode 100644
index 0000000..44f3215
--- /dev/null
+++ b/google/appengine/_internal/django/utils/timesince.py
@@ -0,0 +1,69 @@
+import datetime
+import time
+
+from google.appengine._internal.django.utils.tzinfo import LocalTimezone
+from google.appengine._internal.django.utils.translation import ungettext, ugettext
+
def timesince(d, now=None):
    """
    Takes two datetime objects and returns the time between d and now
    as a nicely formatted string, e.g. "10 minutes". If d occurs after now,
    then "0 minutes" is returned.

    Units used are years, months, weeks, days, hours, and minutes.
    Seconds and microseconds are ignored. Up to two adjacent units will be
    displayed. For example, "2 weeks, 3 days" and "1 year, 3 months" are
    possible outputs, but "2 weeks, 3 hours" and "1 year, 5 days" are not.

    Adapted from http://blog.natbat.co.uk/archive/2003/Jun/14/time_since
    """
    # (seconds-per-unit, translated name), largest unit first. A month is
    # approximated as 30 days and a year as 365 days.
    chunks = (
        (60 * 60 * 24 * 365, lambda n: ungettext('year', 'years', n)),
        (60 * 60 * 24 * 30, lambda n: ungettext('month', 'months', n)),
        (60 * 60 * 24 * 7, lambda n : ungettext('week', 'weeks', n)),
        (60 * 60 * 24, lambda n : ungettext('day', 'days', n)),
        (60 * 60, lambda n: ungettext('hour', 'hours', n)),
        (60, lambda n: ungettext('minute', 'minutes', n))
    )
    # Convert datetime.date to datetime.datetime for comparison.
    if not isinstance(d, datetime.datetime):
        d = datetime.datetime(d.year, d.month, d.day)
    if now and not isinstance(now, datetime.datetime):
        now = datetime.datetime(now.year, now.month, now.day)

    if not now:
        # Match d's awareness so the subtraction below is legal.
        if d.tzinfo:
            now = datetime.datetime.now(LocalTimezone(d))
        else:
            now = datetime.datetime.now()

    # ignore microsecond part of 'd' since we removed it from 'now'
    delta = now - (d - datetime.timedelta(0, 0, d.microsecond))
    since = delta.days * 24 * 60 * 60 + delta.seconds
    if since <= 0:
        # d is in the future compared to now, stop processing.
        return u'0 ' + ugettext('minutes')
    # Find the largest unit with a non-zero count; falls through to minutes
    # (count 0) for spans under a minute.
    for i, (seconds, name) in enumerate(chunks):
        count = since // seconds
        if count != 0:
            break
    s = ugettext('%(number)d %(type)s') % {'number': count, 'type': name(count)}
    if i + 1 < len(chunks):
        # Now get the second item: the remainder expressed in the next
        # smaller (adjacent) unit, shown only when non-zero.
        seconds2, name2 = chunks[i + 1]
        count2 = (since - (seconds * count)) // seconds2
        if count2 != 0:
            s += ugettext(', %(number)d %(type)s') % {'number': count2, 'type': name2(count2)}
    return s
+
def timeuntil(d, now=None):
    """
    Like timesince, but returns a string measuring the time until
    the given time.
    """
    if not now:
        # Mirror d's timezone-awareness; getattr tolerates plain dates.
        tz = getattr(d, 'tzinfo', None)
        now = datetime.datetime.now(LocalTimezone(d) if tz else None)
    # Reversing the arguments measures "until" instead of "since".
    return timesince(now, d)
diff --git a/google/appengine/_internal/django/utils/translation/__init__.py b/google/appengine/_internal/django/utils/translation/__init__.py
new file mode 100644
index 0000000..ad5f77c
--- /dev/null
+++ b/google/appengine/_internal/django/utils/translation/__init__.py
@@ -0,0 +1,104 @@
+"""
+Internationalization support.
+"""
+from google.appengine._internal.django.utils.encoding import force_unicode
+from google.appengine._internal.django.utils.functional import lazy, curry
+
+
# Public API of this package; the loop below also uses this list to generate
# one lazily-resolved real_* delegate per name.
__all__ = ['gettext', 'gettext_noop', 'gettext_lazy', 'ngettext',
    'ngettext_lazy', 'string_concat', 'activate', 'deactivate',
    'get_language', 'get_language_bidi', 'get_date_formats',
    'get_partial_date_formats', 'check_for_language', 'to_locale',
    'get_language_from_request', 'templatize', 'ugettext', 'ugettext_lazy',
    'ungettext', 'deactivate_all']

# Here be dragons, so a short explanation of the logic won't hurt:
# We are trying to solve two problems: (1) access settings, in particular
# settings.USE_I18N, as late as possible, so that modules can be imported
# without having to first configure Django, and (2) if some other code creates
# a reference to one of these functions, don't break that reference when we
# replace the functions with their real counterparts (once we do access the
# settings).
+
def delayed_loader(real_name, *args, **kwargs):
    """
    Call the real, underlying function. We have a level of indirection here so
    that modules can use the translation bits without actually requiring
    Django's settings bits to be configured before import.
    """
    from google.appengine._internal.django.conf import settings
    # Choose the backend per call: the real gettext machinery when i18n is
    # enabled, otherwise the no-op implementations.
    if settings.USE_I18N:
        from google.appengine._internal.django.utils.translation import trans_real as trans
    else:
        from google.appengine._internal.django.utils.translation import trans_null as trans

    # Make the originally requested function call on the way out the door.
    return getattr(trans, real_name)(*args, **kwargs)

# Create one 'real_<name>' delegate per public name; the module-level wrapper
# functions below call these. The temporaries are deleted afterwards so they
# don't pollute the module namespace.
g = globals()
for name in __all__:
    g['real_%s' % name] = curry(delayed_loader, name)
del g, delayed_loader
+
# Thin wrappers over the lazily-resolved real_* delegates. Keeping these as
# stable module-level functions means references taken before Django settings
# are configured remain valid once the real backend is chosen.
def gettext_noop(message):
    return real_gettext_noop(message)

ugettext_noop = gettext_noop

def gettext(message):
    return real_gettext(message)

def ngettext(singular, plural, number):
    return real_ngettext(singular, plural, number)

def ugettext(message):
    return real_ugettext(message)

def ungettext(singular, plural, number):
    return real_ungettext(singular, plural, number)

# Lazy variants: translation is deferred until the result is actually used
# as a (byte or unicode) string.
ngettext_lazy = lazy(ngettext, str)
gettext_lazy = lazy(gettext, str)
ungettext_lazy = lazy(ungettext, unicode)
ugettext_lazy = lazy(ugettext, unicode)
+
# Language-selection and utility wrappers, resolved lazily like the gettext
# wrappers above.
def activate(language):
    return real_activate(language)

def deactivate():
    return real_deactivate()

def get_language():
    return real_get_language()

def get_language_bidi():
    return real_get_language_bidi()

def get_date_formats():
    return real_get_date_formats()

def get_partial_date_formats():
    return real_get_partial_date_formats()

def check_for_language(lang_code):
    return real_check_for_language(lang_code)

def to_locale(language):
    return real_to_locale(language)

def get_language_from_request(request):
    return real_get_language_from_request(request)

def templatize(src):
    return real_templatize(src)

def deactivate_all():
    return real_deactivate_all()
+
def _string_concat(*strings):
    """
    Lazy variant of string concatenation, needed for translations that are
    constructed from multiple parts.
    """
    return u''.join(force_unicode(piece) for piece in strings)
string_concat = lazy(_string_concat, unicode)
diff --git a/google/appengine/_internal/django/utils/translation/trans_null.py b/google/appengine/_internal/django/utils/translation/trans_null.py
new file mode 100644
index 0000000..bda127e
--- /dev/null
+++ b/google/appengine/_internal/django/utils/translation/trans_null.py
@@ -0,0 +1,72 @@
+# These are versions of the functions in django.utils.translation.trans_real
+# that don't actually do anything. This is purely for performance, so that
+# settings.USE_I18N = False can use this module rather than trans_real.py.
+
+import warnings
+from google.appengine._internal.django.conf import settings
+from google.appengine._internal.django.utils.encoding import force_unicode
+from google.appengine._internal.django.utils.safestring import mark_safe, SafeData
+
def ngettext(singular, plural, number):
    "Picks the singular or plural form; no translation is performed."
    return singular if number == 1 else plural
ngettext_lazy = ngettext

def ungettext(singular, plural, number):
    "Unicode variant of ngettext."
    return force_unicode(ngettext(singular, plural, number))

# With USE_I18N off, (de)activating a language is meaningless, there is a
# single configured language, and every language code "exists".
def activate(x):
    return None

def deactivate():
    return None
deactivate_all = deactivate

def get_language():
    return settings.LANGUAGE_CODE

def get_language_bidi():
    return settings.LANGUAGE_CODE in settings.LANGUAGES_BIDI

def check_for_language(x):
    return True
+
# date formats shouldn't be used using gettext anymore. This
# is kept for backward compatibility: gettext() below maps these legacy
# technical message ids straight to the configured format strings.
TECHNICAL_ID_MAP = {
    "DATE_WITH_TIME_FULL": settings.DATETIME_FORMAT,
    "DATE_FORMAT": settings.DATE_FORMAT,
    "DATETIME_FORMAT": settings.DATETIME_FORMAT,
    "TIME_FORMAT": settings.TIME_FORMAT,
    "YEAR_MONTH_FORMAT": settings.YEAR_MONTH_FORMAT,
    "MONTH_DAY_FORMAT": settings.MONTH_DAY_FORMAT,
}
+
def gettext(message):
    """Returns 'message' untranslated, mapping legacy technical ids (see
    TECHNICAL_ID_MAP) to the configured format strings."""
    translated = TECHNICAL_ID_MAP.get(message, message)
    # Preserve the safe-string marking of the input.
    if isinstance(message, SafeData):
        return mark_safe(translated)
    return translated

def ugettext(message):
    "Unicode variant of gettext."
    return force_unicode(gettext(message))

gettext_noop = gettext_lazy = _ = gettext
+
def to_locale(language):
    "Turns a language name (en-us) into a locale name (en_US)."
    head, sep, tail = language.partition('-')
    if sep:
        return head.lower() + '_' + tail.upper()
    return language.lower()

def get_language_from_request(request):
    # Only one language is available when i18n is disabled.
    return settings.LANGUAGE_CODE
+
# get_date_formats and get_partial_date_formats aren't used anymore by Django
# but are kept for backward compatibility. Both emit a deprecation warning
# and return the raw format strings from settings.
def get_date_formats():
    warnings.warn(
        '`django.utils.translation.get_date_formats` is deprecated. '
        'Please update your code to use the new i18n aware formatting.',
        PendingDeprecationWarning
    )
    return settings.DATE_FORMAT, settings.DATETIME_FORMAT, settings.TIME_FORMAT

def get_partial_date_formats():
    warnings.warn(
        '`django.utils.translation.get_partial_date_formats` is deprecated. '
        'Please update your code to use the new i18n aware formatting.',
        PendingDeprecationWarning
    )
    return settings.YEAR_MONTH_FORMAT, settings.MONTH_DAY_FORMAT
diff --git a/google/appengine/_internal/django/utils/translation/trans_real.py b/google/appengine/_internal/django/utils/translation/trans_real.py
new file mode 100644
index 0000000..b757472
--- /dev/null
+++ b/google/appengine/_internal/django/utils/translation/trans_real.py
@@ -0,0 +1,550 @@
+"""Translation helper functions."""
+
+import locale
+import os
+import re
+import sys
+import warnings
+import gettext as gettext_module
+from cStringIO import StringIO
+
+from google.appengine._internal.django.utils.importlib import import_module
+from google.appengine._internal.django.utils.safestring import mark_safe, SafeData
+from google.appengine._internal.django.utils.thread_support import currentThread
+
# Translations are cached in a dictionary for every language+app tuple.
# The active translations are stored by threadid to make them thread local.
_translations = {}
_active = {}

# The default translation is based on the settings file.
_default = None

# This is a cache for normalized accept-header languages to prevent multiple
# file lookups when checking the same locale on repeated requests.
_accepted = {}

# Format of Accept-Language header values. From RFC 2616, section 14.4 and 3.9.
# Bug fix: the '.' in the q=1 branch was previously unescaped, so malformed
# quality values such as "q=1x000" were accepted (the sibling q=0 branch
# already escaped it).
accept_language_re = re.compile(r'''
    ([A-Za-z]{1,8}(?:-[A-Za-z]{1,8})*|\*)     # "en", "en-au", "x-y-z", "*"
    (?:;q=(0(?:\.\d{,3})?|1(?:\.0{,3})?))?    # Optional "q=1.00", "q=0.8"
    (?:\s*,\s*|$)                             # Multiple accepts per header.
    ''', re.VERBOSE)
+
def to_locale(language, to_lower=False):
    """
    Turns a language name (en-us) into a locale name (en_US). If 'to_lower' is
    True, the last component is lower-cased (en_us).
    """
    head, sep, tail = language.partition('-')
    if not sep:
        return language.lower()
    if to_lower:
        return head.lower() + '_' + tail.lower()
    # Variants longer than two characters get a title-cased suffix so that
    # e.g. sr-latn maps to the correct locale directory sr_Latn.
    if len(tail) > 2:
        return head.lower() + '_' + tail[0].upper() + tail[1:].lower()
    return head.lower() + '_' + tail.upper()
+
def to_language(locale):
    """Turns a locale name (en_US) into a language name (en-us)."""
    head, sep, tail = locale.partition('_')
    if sep:
        return head.lower() + '-' + tail.lower()
    return locale.lower()
+
class DjangoTranslation(gettext_module.GNUTranslations):
    """
    This class sets up the GNUTranslations context with regard to output
    charset. Django uses a defined DEFAULT_CHARSET as the output charset on
    Python 2.4.
    """
    def __init__(self, *args, **kw):
        from google.appengine._internal.django.conf import settings
        gettext_module.GNUTranslations.__init__(self, *args, **kw)
        # Starting with Python 2.4, there's a function to define
        # the output charset. Before 2.4, the output charset is
        # identical with the translation file charset.
        try:
            self.set_output_charset('utf-8')
        except AttributeError:
            pass
        self.django_output_charset = 'utf-8'
        # Language code served by this catalog; '??' until set_language().
        self.__language = '??'

    def merge(self, other):
        # Folds another catalog into this one; on duplicate message ids the
        # other catalog wins.
        self._catalog.update(other._catalog)

    def set_language(self, language):
        # Records which language this (possibly merged) catalog represents.
        self.__language = language

    def language(self):
        return self.__language

    def __repr__(self):
        return "<DjangoTranslation lang:%s>" % self.__language
+
def translation(language):
    """
    Returns a translation object.

    This translation object will be constructed out of multiple GNUTranslations
    objects by merging their catalogs. It will construct a object for the
    requested language and add a fallback to the default language, if it's
    different from the requested language.
    """
    global _translations

    t = _translations.get(language, None)
    if t is not None:
        return t

    from google.appengine._internal.django.conf import settings

    # Catalogs shipped next to the settings module's package.
    globalpath = os.path.join(os.path.dirname(sys.modules[settings.__module__].__file__), 'locale')

    if settings.SETTINGS_MODULE is not None:
        parts = settings.SETTINGS_MODULE.split('.')
        project = import_module(parts[0])
        projectpath = os.path.join(os.path.dirname(project.__file__), 'locale')
    else:
        projectpath = None

    def _fetch(lang, fallback=None):
        # Builds and caches the merged translation for 'lang'; when nothing
        # at all is found, uses 'fallback' or a NullTranslations.

        global _translations

        loc = to_locale(lang)

        res = _translations.get(lang, None)
        if res is not None:
            return res

        def _translation(path):
            # Loads a single catalog from 'path'; None when the file is
            # missing (IOError).
            try:
                t = gettext_module.translation('django', path, [loc], DjangoTranslation)
                t.set_language(lang)
                return t
            except IOError, e:
                return None

        res = _translation(globalpath)

        # We want to ensure that, for example, "en-gb" and "en-us" don't share
        # the same translation object (thus, merging en-us with a local update
        # doesn't affect en-gb), even though they will both use the core "en"
        # translation. So we have to subvert Python's internal gettext caching.
        base_lang = lambda x: x.split('-', 1)[0]
        if base_lang(lang) in [base_lang(trans) for trans in _translations]:
            res._info = res._info.copy()
            res._catalog = res._catalog.copy()

        def _merge(path):
            # Merges the catalog at 'path' into 'res', or adopts it outright
            # when no catalog has been loaded yet.
            t = _translation(path)
            if t is not None:
                if res is None:
                    return t
                else:
                    res.merge(t)
            return res

        # LOCALE_PATHS, per-app and project catalogs are merged on top of the
        # global one, in that order (later merges win on duplicates).
        for localepath in settings.LOCALE_PATHS:
            if os.path.isdir(localepath):
                res = _merge(localepath)

        for appname in settings.INSTALLED_APPS:
            app = import_module(appname)
            apppath = os.path.join(os.path.dirname(app.__file__), 'locale')

            if os.path.isdir(apppath):
                res = _merge(apppath)

        if projectpath and os.path.isdir(projectpath):
            res = _merge(projectpath)

        if res is None:
            if fallback is not None:
                res = fallback
            else:
                return gettext_module.NullTranslations()
        _translations[lang] = res
        return res

    default_translation = _fetch(settings.LANGUAGE_CODE)
    current_translation = _fetch(language, fallback=default_translation)

    return current_translation
+
def activate(language):
    """
    Fetches the translation object for the given language and installs it as
    the active translation for the current thread.
    """
    if isinstance(language, basestring) and language == 'no':
        warnings.warn(
            "The use of the language code 'no' is deprecated. "
            "Please use the 'nb' translation instead.",
            PendingDeprecationWarning
        )
    # translation() caches, so repeated activations are cheap.
    _active[currentThread()] = translation(language)
+
def deactivate():
    """
    Deinstalls the currently active translation object so that further _ calls
    will resolve against the default translation object, again.
    """
    thread = currentThread()
    if thread in _active:
        del _active[thread]
+
def deactivate_all():
    """
    Makes the active translation object a NullTranslations() instance. This is
    useful when we want delayed translations to appear as the original string
    for some reason.
    """
    # NullTranslations echoes every message back untranslated.
    _active[currentThread()] = gettext_module.NullTranslations()
+
def get_language():
    "Returns the language code of the currently selected translation."
    current = _active.get(currentThread(), None)
    if current is not None:
        try:
            return to_language(current.language())
        except AttributeError:
            # NullTranslations has no language(); fall through to default.
            pass
    # If we don't have a real translation object, assume it's the default language.
    from google.appengine._internal.django.conf import settings
    return settings.LANGUAGE_CODE
+
def get_language_bidi():
    """
    Returns selected language's BiDi layout.

    * False = left-to-right layout
    * True = right-to-left layout
    """
    from google.appengine._internal.django.conf import settings
    # Only the base language (e.g. 'ar' of 'ar-eg') determines direction.
    return get_language().split('-')[0] in settings.LANGUAGES_BIDI
+
def catalog():
    """
    Returns the current active catalog for further processing.
    This can be used if you need to modify the catalog or want to access the
    whole message catalog instead of just translating one string.
    """
    global _default
    active = _active.get(currentThread(), None)
    if active is not None:
        return active
    # No per-thread translation: lazily build and reuse the default one.
    if _default is None:
        from google.appengine._internal.django.conf import settings
        _default = translation(settings.LANGUAGE_CODE)
    return _default
+
def do_translate(message, translation_function):
    """
    Translates 'message' using the given 'translation_function' name -- which
    will be either gettext or ugettext. It uses the current thread to find the
    translation object to use. If no current translation is activated, the
    message will be run through the default translation object.
    """
    global _default
    # Catalog keys use '\n' line endings; normalize before lookup.
    eol_message = message.replace('\r\n', '\n').replace('\r', '\n')
    t = _active.get(currentThread(), None)
    if t is None:
        if _default is None:
            from google.appengine._internal.django.conf import settings
            _default = translation(settings.LANGUAGE_CODE)
        t = _default
    result = getattr(t, translation_function)(eol_message)
    # Preserve the safe-string marking of the input.
    if isinstance(message, SafeData):
        return mark_safe(result)
    return result
+
def gettext(message):
    "Returns the translation of 'message' as a UTF-8 bytestring."
    return do_translate(message, 'gettext')

def ugettext(message):
    "Returns the translation of 'message' as a unicode string."
    return do_translate(message, 'ugettext')

def gettext_noop(message):
    """
    Marks strings for translation but doesn't translate them now. This can be
    used to store strings in global variables that should stay in the base
    language (because they might be used externally) and will be translated
    later.
    """
    return message
+
def do_ntranslate(singular, plural, number, translation_function):
    """Dispatches a plural-aware translation ('ngettext' or 'ungettext') to
    the active thread's catalog, falling back to the default catalog."""
    global _default

    t = _active.get(currentThread(), None)
    if t is None:
        if _default is None:
            from google.appengine._internal.django.conf import settings
            _default = translation(settings.LANGUAGE_CODE)
        t = _default
    return getattr(t, translation_function)(singular, plural, number)
+
def ngettext(singular, plural, number):
    """
    Returns a UTF-8 bytestring of the translation of either the singular or
    plural, based on the number.
    """
    return do_ntranslate(singular, plural, number, 'ngettext')

def ungettext(singular, plural, number):
    """
    Returns a unicode string of the translation of either the singular or
    plural, based on the number.
    """
    return do_ntranslate(singular, plural, number, 'ungettext')
+
def check_for_language(lang_code):
    """
    Checks whether there is a global language file for the given language
    code. This is used to decide whether a user-provided language is
    available. This is only used for language codes from either the cookies or
    session.
    """
    from google.appengine._internal.django.conf import settings
    globalpath = os.path.join(
        os.path.dirname(sys.modules[settings.__module__].__file__), 'locale')
    found = gettext_module.find('django', globalpath, [to_locale(lang_code)])
    return found is not None
+
def get_language_from_request(request):
    """
    Analyzes the request to find what language the user wants the system to
    show. Only languages listed in settings.LANGUAGES are taken into account.
    If the user requests a sublanguage where we have a main language, we send
    out the main language.

    Precedence: session, then cookie, then the Accept-Language header, then
    settings.LANGUAGE_CODE.
    """
    global _accepted
    from google.appengine._internal.django.conf import settings
    globalpath = os.path.join(os.path.dirname(sys.modules[settings.__module__].__file__), 'locale')
    supported = dict(settings.LANGUAGES)

    if hasattr(request, 'session'):
        lang_code = request.session.get('django_language', None)
        if lang_code in supported and lang_code is not None and check_for_language(lang_code):
            return lang_code

    lang_code = request.COOKIES.get(settings.LANGUAGE_COOKIE_NAME)

    if lang_code and lang_code not in supported:
        lang_code = lang_code.split('-')[0] # e.g. if fr-ca is not supported fallback to fr

    if lang_code and lang_code in supported and check_for_language(lang_code):
        return lang_code

    # Accept-Language entries arrive sorted by quality, best first.
    accept = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
    for accept_lang, unused in parse_accept_lang_header(accept):
        if accept_lang == '*':
            break

        # We have a very restricted form for our language files (no encoding
        # specifier, since they all must be UTF-8 and only one possible
        # language each time. So we avoid the overhead of gettext.find() and
        # work out the MO file manually.

        # 'normalized' is the root name of the locale in POSIX format (which is
        # the format used for the directories holding the MO files).
        normalized = locale.locale_alias.get(to_locale(accept_lang, True))
        if not normalized:
            continue
        # Remove the default encoding from locale_alias.
        normalized = normalized.split('.')[0]

        if normalized in _accepted:
            # We've seen this locale before and have an MO file for it, so no
            # need to check again.
            return _accepted[normalized]

        # Try the exact language first, then its base language (en-gb -> en).
        for lang, dirname in ((accept_lang, normalized),
                (accept_lang.split('-')[0], normalized.split('_')[0])):
            if lang.lower() not in supported:
                continue
            langfile = os.path.join(globalpath, dirname, 'LC_MESSAGES',
                    'django.mo')
            if os.path.exists(langfile):
                _accepted[normalized] = lang
                return lang

    return settings.LANGUAGE_CODE
+
dot_re = re.compile(r'\S')

def blankout(src, char):
    """
    Changes every non-whitespace character to the given char.
    Used in the templatize function.
    """
    blanked = dot_re.sub(char, src)
    return blanked
+
# Matches the contents of {% trans "..." %} tags.
inline_re = re.compile(r"""^\s*trans\s+((?:".*?")|(?:'.*?'))\s*""")
# Matches {% blocktrans %} / {% endblocktrans %} / {% plural %} tag contents.
block_re = re.compile(r"""^\s*blocktrans(?:\s+|$)""")
endblock_re = re.compile(r"""^\s*endblocktrans$""")
plural_re = re.compile(r"""^\s*plural$""")
# Matches _("...") constants embedded inside tag contents.
constant_re = re.compile(r"""_\(((?:".*?")|(?:'.*?'))\)""")
+
def templatize(src):
    """
    Turns a Django template into something that is understood by xgettext. It
    does so by translating the Django translation tags into standard gettext
    function invocations.

    Text outside translation constructs is blanked out (see blankout) so that
    character positions in the output still line up with the input.
    """
    from google.appengine._internal.django.template import Lexer, TOKEN_TEXT, TOKEN_VAR, TOKEN_BLOCK
    out = StringIO()
    intrans = False      # True while inside a {% blocktrans %} block
    inplural = False     # True after {% plural %} within a blocktrans
    singular = []
    plural = []
    for t in Lexer(src, None).tokenize():
        if intrans:
            if t.token_type == TOKEN_BLOCK:
                endbmatch = endblock_re.match(t.contents)
                pluralmatch = plural_re.match(t.contents)
                if endbmatch:
                    # Close the blocktrans: emit ngettext() when a plural
                    # form was collected, gettext() otherwise, then blank out
                    # the collected text to preserve positions.
                    if inplural:
                        out.write(' ngettext(%r,%r,count) ' % (''.join(singular), ''.join(plural)))
                        for part in singular:
                            out.write(blankout(part, 'S'))
                        for part in plural:
                            out.write(blankout(part, 'P'))
                    else:
                        out.write(' gettext(%r) ' % ''.join(singular))
                        for part in singular:
                            out.write(blankout(part, 'S'))
                    intrans = False
                    inplural = False
                    singular = []
                    plural = []
                elif pluralmatch:
                    inplural = True
                else:
                    raise SyntaxError("Translation blocks must not include other block tags: %s" % t.contents)
            elif t.token_type == TOKEN_VAR:
                # Template variables become named %-format placeholders.
                if inplural:
                    plural.append('%%(%s)s' % t.contents)
                else:
                    singular.append('%%(%s)s' % t.contents)
            elif t.token_type == TOKEN_TEXT:
                # Escape literal '%' so the collected text is %-format safe.
                contents = t.contents.replace('%', '%%')
                if inplural:
                    plural.append(contents)
                else:
                    singular.append(contents)
        else:
            if t.token_type == TOKEN_BLOCK:
                imatch = inline_re.match(t.contents)
                bmatch = block_re.match(t.contents)
                cmatches = constant_re.findall(t.contents)
                if imatch:
                    # Inline {% trans "..." %}: strip the surrounding quotes
                    # and emit a gettext() call.
                    g = imatch.group(1)
                    if g[0] == '"': g = g.strip('"')
                    elif g[0] == "'": g = g.strip("'")
                    out.write(' gettext(%r) ' % g)
                elif bmatch:
                    # Opening blocktrans: emit any _() constants from the tag
                    # arguments, then start collecting the block contents.
                    for fmatch in constant_re.findall(t.contents):
                        out.write(' _(%s) ' % fmatch)
                    intrans = True
                    inplural = False
                    singular = []
                    plural = []
                elif cmatches:
                    for cmatch in cmatches:
                        out.write(' _(%s) ' % cmatch)
                else:
                    out.write(blankout(t.contents, 'B'))
            elif t.token_type == TOKEN_VAR:
                # Variable token: keep _() constants (including those used as
                # filter arguments), blank out everything else.
                parts = t.contents.split('|')
                cmatch = constant_re.match(parts[0])
                if cmatch:
                    out.write(' _(%s) ' % cmatch.group(1))
                for p in parts[1:]:
                    if p.find(':_(') >= 0:
                        out.write(' %s ' % p.split(':',1)[1])
                    else:
                        out.write(blankout(p, 'F'))
            else:
                out.write(blankout(t.contents, 'X'))
    return out.getvalue()
+
def parse_accept_lang_header(lang_string):
    """
    Parses the lang_string, which is the body of an HTTP Accept-Language
    header, and returns a list of (lang, q-value), ordered by 'q' values.

    Any format errors in lang_string results in an empty list being returned.
    """
    result = []
    pieces = accept_language_re.split(lang_string)
    # A well-formed header leaves an empty trailing piece after the split;
    # anything else is unparsed garbage at the end.
    if pieces[-1]:
        return []
    # The split yields groups of three pieces per language range:
    # (text before the match, language tag, q-value or '').
    for i in range(0, len(pieces) - 1, 3):
        first, lang, priority = pieces[i : i + 3]
        if first:
            # Junk between entries: treat the whole header as malformed.
            return []
        # NOTE(review): the and/or idiom maps both a missing q-value and an
        # explicit 'q=0' to 1.0; per the HTTP spec q=0 means "not acceptable".
        # Confirm whether callers rely on the current behaviour before fixing.
        priority = priority and float(priority) or 1.0
        result.append((lang, priority))
    # Python 2 comparator sort: highest q-value first (stable for ties).
    result.sort(lambda x, y: -cmp(x[1], y[1]))
    return result
+
+# get_date_formats and get_partial_date_formats aren't used anymore by Django
+# and are kept for backward compatibility.
+# Note, it's also important to keep format names marked for translation.
+# For compatibility we still want to have formats on translation catalogs.
+# That makes template code like {{ my_date|date:_('DATE_FORMAT') }} still work
def get_date_formats():
    """
    Return (date_format, datetime_format, time_format).

    Each format is looked up as a translatable technical message ID; when the
    active translation catalog has no entry for an ID (the translation comes
    back unchanged), the identically named settings attribute is used instead.
    """
    warnings.warn(
        "'django.utils.translation.get_date_formats' is deprecated. "
        "Please update your code to use the new i18n aware formatting.",
        PendingDeprecationWarning
    )
    from google.appengine._internal.django.conf import settings
    formats = []
    for message_id in ('DATE_FORMAT', 'DATETIME_FORMAT', 'TIME_FORMAT'):
        translated = ugettext(message_id)
        if translated == message_id:
            # No translation provided; fall back to the settings value.
            translated = getattr(settings, message_id)
        formats.append(translated)
    return tuple(formats)
+
def get_partial_date_formats():
    """
    Return (year_month_format, month_day_format).

    Each format is looked up as a translatable technical message ID; when the
    active translation catalog has no entry for an ID (the translation comes
    back unchanged), the identically named settings attribute is used instead.
    """
    warnings.warn(
        "'django.utils.translation.get_partial_date_formats' is deprecated. "
        "Please update your code to use the new i18n aware formatting.",
        PendingDeprecationWarning
    )
    from google.appengine._internal.django.conf import settings
    formats = []
    for message_id in ('YEAR_MONTH_FORMAT', 'MONTH_DAY_FORMAT'):
        translated = ugettext(message_id)
        if translated == message_id:
            # No translation provided; fall back to the settings value.
            translated = getattr(settings, message_id)
        formats.append(translated)
    return tuple(formats)
+
diff --git a/google/appengine/_internal/django/utils/tree.py b/google/appengine/_internal/django/utils/tree.py
new file mode 100644
index 0000000..b309aa4
--- /dev/null
+++ b/google/appengine/_internal/django/utils/tree.py
@@ -0,0 +1,153 @@
+"""
+A class for storing a tree graph. Primarily used for filter constructs in the
+ORM.
+"""
+
+from google.appengine._internal.django.utils.copycompat import deepcopy
+
class Node(object):
    """
    A single internal node in the tree graph. A Node should be viewed as a
    connection (the root) with the children being either leaf nodes or other
    Node instances.
    """
    # Standard connector type. Clients usually won't use this at all and
    # subclasses will usually override the value.
    default = 'DEFAULT'

    def __init__(self, children=None, connector=None, negated=False):
        """
        Constructs a new Node. If no connector is given, the default will be
        used.

        Warning: You probably don't want to pass in the 'negated' parameter. It
        is NOT the same as constructing a node and calling negate() on the
        result.
        """
        # Copy the children list so the caller's list is never aliased.
        self.children = children and children[:] or []
        self.connector = connector or self.default
        # Stack of saved parent states; used by start_subtree()/end_subtree().
        self.subtree_parents = []
        self.negated = negated

    # We need this because of django.db.models.query_utils.Q. Q. __init__() is
    # problematic, but it is a natural Node subclass in all other respects.
    def _new_instance(cls, children=None, connector=None, negated=False):
        """
        This is called to create a new instance of this class when we need new
        Nodes (or subclasses) in the internal code in this class. Normally, it
        just shadows __init__(). However, subclasses with an __init__ signature
        that is not an extension of Node.__init__ might need to implement this
        method to allow a Node to create a new instance of them (if they have
        any extra setting up to do).
        """
        # Build via Node.__init__ and then retarget the class, bypassing the
        # subclass's (possibly incompatible) __init__.
        obj = Node(children, connector, negated)
        obj.__class__ = cls
        return obj
    _new_instance = classmethod(_new_instance)

    def __str__(self):
        if self.negated:
            return '(NOT (%s: %s))' % (self.connector, ', '.join([str(c) for c
                    in self.children]))
        return '(%s: %s)' % (self.connector, ', '.join([str(c) for c in
                self.children]))

    def __deepcopy__(self, memodict):
        """
        Utility method used by copy.deepcopy().
        """
        # Same class-retargeting trick as _new_instance: avoid calling the
        # subclass __init__ while still producing an instance of it.
        obj = Node(connector=self.connector, negated=self.negated)
        obj.__class__ = self.__class__
        obj.children = deepcopy(self.children, memodict)
        obj.subtree_parents = deepcopy(self.subtree_parents, memodict)
        return obj

    def __len__(self):
        """
        The size of a node is the number of children it has.
        """
        return len(self.children)

    def __nonzero__(self):
        """
        For truth value testing: a node is truthy iff it has children.
        """
        return bool(self.children)

    def __contains__(self, other):
        """
        Returns True if 'other' is a direct child of this instance.
        """
        return other in self.children

    def add(self, node, conn_type):
        """
        Adds a new node to the tree. If the conn_type is the same as the root's
        current connector type, the node is added to the first level.
        Otherwise, the whole tree is pushed down one level and a new root
        connector is created, connecting the existing tree and the new node.
        """
        if node in self.children and conn_type == self.connector:
            return
        if len(self.children) < 2:
            # With fewer than two children the connector carries no meaning,
            # so it is safe to simply adopt the requested one.
            self.connector = conn_type
        if self.connector == conn_type:
            # Flatten compatible sub-nodes instead of nesting them.
            if isinstance(node, Node) and (node.connector == conn_type or
                    len(node) == 1):
                self.children.extend(node.children)
            else:
                self.children.append(node)
        else:
            # Different connector: push the existing tree down one level.
            obj = self._new_instance(self.children, self.connector,
                    self.negated)
            self.connector = conn_type
            self.children = [obj, node]

    def negate(self):
        """
        Negate the sense of the root connector. This reorganises the children
        so that the current node has a single child: a negated node containing
        all the previous children. This slightly odd construction makes adding
        new children behave more intuitively.

        Interpreting the meaning of this negate is up to client code. This
        method is useful for implementing "not" arrangements.
        """
        self.children = [self._new_instance(self.children, self.connector,
                not self.negated)]
        self.connector = self.default

    def start_subtree(self, conn_type):
        """
        Sets up internal state so that new nodes are added to a subtree of the
        current node. The conn_type specifies how the sub-tree is joined to the
        existing children.
        """
        if len(self.children) == 1:
            self.connector = conn_type
        elif self.connector != conn_type:
            # Incompatible connector: push the current children down a level
            # before opening the subtree.
            self.children = [self._new_instance(self.children, self.connector,
                    self.negated)]
            self.connector = conn_type
            self.negated = False

        # Save the current state on the stack and start with a fresh node;
        # end_subtree() pops and restores this state.
        self.subtree_parents.append(self.__class__(self.children,
                self.connector, self.negated))
        self.connector = self.default
        self.negated = False
        self.children = []

    def end_subtree(self):
        """
        Closes off the most recently unmatched start_subtree() call.

        This puts the current state into a node of the parent tree and returns
        the current instances state to be the parent.
        """
        obj = self.subtree_parents.pop()
        node = self.__class__(self.children, self.connector)
        self.connector = obj.connector
        self.negated = obj.negated
        self.children = obj.children
        self.children.append(node)
+
diff --git a/google/appengine/_internal/django/utils/tzinfo.py b/google/appengine/_internal/django/utils/tzinfo.py
new file mode 100644
index 0000000..ec4ed65
--- /dev/null
+++ b/google/appengine/_internal/django/utils/tzinfo.py
@@ -0,0 +1,77 @@
+"Implementation of tzinfo classes for use with datetime.datetime."
+
+import time
+from datetime import timedelta, tzinfo
+from google.appengine._internal.django.utils.encoding import smart_unicode, smart_str, DEFAULT_LOCALE_ENCODING
+
class FixedOffset(tzinfo):
    "Fixed offset in minutes east from UTC."
    def __init__(self, offset):
        """
        offset: minutes east of UTC as an int (negative for west), or a
        datetime.timedelta carrying the same offset.
        """
        if isinstance(offset, timedelta):
            self.__offset = offset
            # Recover the signed number of whole minutes for the name below.
            # Using timedelta.seconds alone is wrong for negative offsets:
            # seconds is normalized into [0, 86400) with the sign pushed into
            # days, so e.g. timedelta(minutes=-60) has seconds == 82800 and
            # previously produced the bogus name '+2300' instead of '-0100'.
            offset = self.__offset.days * 24 * 60 + self.__offset.seconds // 60
        else:
            self.__offset = timedelta(minutes=offset)

        sign = offset < 0 and '-' or '+'
        # RFC 822 style name, e.g. '+0530' or '-0100'.
        self.__name = u"%s%02d%02d" % (sign, abs(offset) / 60., abs(offset) % 60)

    def __repr__(self):
        return self.__name

    def utcoffset(self, dt):
        """Return the fixed offset from UTC; independent of dt."""
        return self.__offset

    def tzname(self, dt):
        """Return the '+HHMM'/'-HHMM' style name; independent of dt."""
        return self.__name

    def dst(self, dt):
        # A fixed offset has, by definition, no daylight saving transitions.
        return timedelta(0)
+
class LocalTimezone(tzinfo):
    "Proxy timezone information from time module."
    def __init__(self, dt):
        tzinfo.__init__(self)
        # Cache the zone name for repr(); computed once from the given dt.
        self._tzname = self.tzname(dt)

    def __repr__(self):
        return smart_str(self._tzname)

    def utcoffset(self, dt):
        # time.timezone/time.altzone are seconds WEST of UTC, hence the
        # negation to get an east-of-UTC offset.
        if self._isdst(dt):
            return timedelta(seconds=-time.altzone)
        else:
            return timedelta(seconds=-time.timezone)

    def dst(self, dt):
        # DST correction: difference between the DST and standard offsets.
        if self._isdst(dt):
            return timedelta(seconds=-time.altzone) - timedelta(seconds=-time.timezone)
        else:
            return timedelta(0)

    def tzname(self, dt):
        try:
            return smart_unicode(time.tzname[self._isdst(dt)],
                                 DEFAULT_LOCALE_ENCODING)
        except UnicodeDecodeError:
            # Zone name not decodable in the locale encoding; callers must
            # tolerate a None name.
            return None

    def _isdst(self, dt):
        # Build a time-tuple with tm_isdst=-1 so mktime determines whether
        # DST is in effect for the local zone at that instant.
        tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, -1)
        try:
            stamp = time.mktime(tt)
        except (OverflowError, ValueError):
            # 32 bit systems can't handle dates after Jan 2038, and certain
            # systems can't handle dates before ~1901-12-01:
            #
            # >>> time.mktime((1900, 1, 13, 0, 0, 0, 0, 0, 0))
            # OverflowError: mktime argument out of range
            # >>> time.mktime((1850, 1, 13, 0, 0, 0, 0, 0, 0))
            # ValueError: year out of range
            #
            # In this case, we fake the date, because we only care about the
            # DST flag.
            tt = (2037,) + tt[1:]
            stamp = time.mktime(tt)
        tt = time.localtime(stamp)
        return tt.tm_isdst > 0
diff --git a/google/appengine/_internal/django/utils/version.py b/google/appengine/_internal/django/utils/version.py
new file mode 100644
index 0000000..69132fd
--- /dev/null
+++ b/google/appengine/_internal/django/utils/version.py
@@ -0,0 +1,42 @@
+from google.appengine._internal import django
+import os.path
+import re
+
def get_svn_revision(path=None):
    """
    Returns the SVN revision in the form SVN-XXXX,
    where XXXX is the revision number.

    Returns SVN-unknown if anything goes wrong, such as an unexpected
    format of internal SVN files.

    If path is provided, it should be a directory whose SVN info you want to
    inspect. If it's not provided, this will use the root django/ package
    directory.
    """
    rev = None
    if path is None:
        path = django.__path__[0]
    entries_path = '%s/.svn/entries' % path

    try:
        # Close the handle promptly instead of leaking it until GC
        # (the original open(...).read() never closed the file).
        entries_file = open(entries_path, 'r')
        try:
            entries = entries_file.read()
        finally:
            entries_file.close()
    except IOError:
        pass
    else:
        # Versions >= 7 of the entries file are flat text. The first line is
        # the version number. The next set of digits after 'dir' is the
        # revision.
        if re.match(r'(\d+)', entries):
            rev_match = re.search(r'\d+\s+dir\s+(\d+)', entries)
            if rev_match:
                rev = rev_match.groups()[0]
        # Older XML versions of the file specify revision as an attribute of
        # the first entries node.
        else:
            from xml.dom import minidom
            dom = minidom.parse(entries_path)
            rev = dom.getElementsByTagName('entry')[0].getAttribute('revision')

    if rev:
        return u'SVN-%s' % rev
    return u'SVN-unknown'
diff --git a/google/appengine/_internal/django/utils/xmlutils.py b/google/appengine/_internal/django/utils/xmlutils.py
new file mode 100644
index 0000000..a1eb5fb
--- /dev/null
+++ b/google/appengine/_internal/django/utils/xmlutils.py
@@ -0,0 +1,14 @@
+"""
+Utilities for XML generation/parsing.
+"""
+
+from xml.sax.saxutils import XMLGenerator
+
class SimplerXMLGenerator(XMLGenerator):
    """XMLGenerator with a shortcut for emitting childless elements."""

    def addQuickElement(self, name, contents=None, attrs=None):
        "Convenience method for adding an element with no children"
        self.startElement(name, attrs or {})
        if contents is not None:
            self.characters(contents)
        self.endElement(name)
diff --git a/google/appengine/_internal/graphy/__init__.py b/google/appengine/_internal/graphy/__init__.py
new file mode 100644
index 0000000..6cd828b
--- /dev/null
+++ b/google/appengine/_internal/graphy/__init__.py
@@ -0,0 +1 @@
+__version__='1.0'
diff --git a/google/appengine/_internal/graphy/backends/__init__.py b/google/appengine/_internal/graphy/backends/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/google/appengine/_internal/graphy/backends/__init__.py
diff --git a/google/appengine/_internal/graphy/backends/google_chart_api/__init__.py b/google/appengine/_internal/graphy/backends/google_chart_api/__init__.py
new file mode 100644
index 0000000..011edfe
--- /dev/null
+++ b/google/appengine/_internal/graphy/backends/google_chart_api/__init__.py
@@ -0,0 +1,50 @@
+#!/usr/bin/python2.4
+#
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Backend which can generate charts using the Google Chart API."""
+
+from google.appengine._internal.graphy import line_chart
+from google.appengine._internal.graphy import bar_chart
+from google.appengine._internal.graphy import pie_chart
+from google.appengine._internal.graphy.backends.google_chart_api import encoders
+
+def _GetChartFactory(chart_class, display_class):
+ """Create a factory method for instantiating charts with displays.
+
+ Returns a method which, when called, will create & return a chart with
+ chart.display already populated.
+ """
+ def Inner(*args, **kwargs):
+ chart = chart_class(*args, **kwargs)
+ chart.display = display_class(chart)
+ return chart
+ return Inner
+
+# These helper methods make it easy to get chart objects with display
+# objects already setup. For example, this:
+# chart = google_chart_api.LineChart()
+# is equivalent to:
+# chart = line_chart.LineChart()
+# chart.display = google_chart_api.LineChartEncoder()
+#
+# (If there's some chart type for which a helper method isn't available, you
+# can always just instantiate the correct encoder manually, like in the 2nd
+# example above).
+# TODO: fix these so they have nice docs in ipython (give them __doc__)
# Public factory callables: each returns a chart instance whose .display
# attribute is already populated with the matching Google Chart API encoder.
LineChart = _GetChartFactory(line_chart.LineChart, encoders.LineChartEncoder)
Sparkline = _GetChartFactory(line_chart.Sparkline, encoders.SparklineEncoder)
BarChart = _GetChartFactory(bar_chart.BarChart, encoders.BarChartEncoder)
PieChart = _GetChartFactory(pie_chart.PieChart, encoders.PieChartEncoder)
diff --git a/google/appengine/_internal/graphy/backends/google_chart_api/encoders.py b/google/appengine/_internal/graphy/backends/google_chart_api/encoders.py
new file mode 100644
index 0000000..6a886db
--- /dev/null
+++ b/google/appengine/_internal/graphy/backends/google_chart_api/encoders.py
@@ -0,0 +1,430 @@
+#!/usr/bin/python2.4
+#
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Display objects for the different kinds of charts.
+
+Not intended for end users, use the methods in __init__ instead."""
+
+import warnings
+from google.appengine._internal.graphy.backends.google_chart_api import util
+
+
class BaseChartEncoder(object):

  """Base class for encoders which turn chart objects into Google Chart URLS.

  Object attributes:
    extra_params: Dict to add/override specific chart params. Of the
        form param:string, passed directly to the Google Chart API.
        For example, 'cht':'lti' becomes ?cht=lti in the URL.
    url_base: The prefix to use for URLs. If you want to point to a different
        server for some reason, you would override this.
    formatters: TODO: Need to explain how these work, and how they are
        different from chart formatters.
    enhanced_encoding: If True, uses enhanced encoding. If
        False, simple encoding is used.
    escape_url: If True, URL will be properly escaped. If False, characters
        like | and , will be unescapped (which makes the URL easier to
        read).
  """

  def __init__(self, chart):
    self.extra_params = {}  # You can add specific params here.
    self.url_base = 'http://chart.apis.google.com/chart'
    self.formatters = self._GetFormatters()
    self.chart = chart
    self.enhanced_encoding = False
    self.escape_url = True  # You can turn off URL escaping for debugging.
    self._width = 0   # These are set when someone calls Url()
    self._height = 0

  def Url(self, width, height, use_html_entities=False):
    """Get the URL for our graph.

    Args:
      width: Chart width in pixels.
      height: Chart height in pixels.
      use_html_entities: If True, reserved HTML characters (&, <, >, ") in the
        URL are replaced with HTML entities (&, <, etc.). Default is False.
    """
    self._width = width
    self._height = height
    params = self._Params(self.chart)
    return util.EncodeUrl(self.url_base, params, self.escape_url,
                          use_html_entities)

  def Img(self, width, height):
    """Get an image tag for our graph."""
    # HTML entities are required inside an attribute value.
    url = self.Url(width, height, use_html_entities=True)
    tag = '<img src="%s" width="%s" height="%s" alt="chart"/>'
    return tag % (url, width, height)

  def _GetType(self, chart):
    """Return the correct chart_type param for the chart."""
    # Subclasses must supply the 'cht' value for their chart flavour.
    raise NotImplementedError

  def _GetFormatters(self):
    """Get a list of formatter functions to use for encoding."""
    # Order matters: later formatters (extra params, size) can override
    # values produced by earlier ones when merged in _Params.
    formatters = [self._GetLegendParams,
                  self._GetDataSeriesParams,
                  self._GetColors,
                  self._GetAxisParams,
                  self._GetGridParams,
                  self._GetType,
                  self._GetExtraParams,
                  self._GetSizeParams,
                 ]
    return formatters

  def _Params(self, chart):
    """Collect all the different params we need for the URL. Collecting
    all params as a dict before converting to a URL makes testing easier.
    """
    chart = chart.GetFormattedChart()
    params = {}
    # Helper to merge a formatter's output after mapping readable names to
    # the short Google Chart API parameter names.
    def Add(new_params):
      params.update(util.ShortenParameterNames(new_params))

    for formatter in self.formatters:
      Add(formatter(chart))

    # Stringify all values so the URL encoder sees uniform types.
    for key in params:
      params[key] = str(params[key])
    return params

  def _GetSizeParams(self, chart):
    """Get the size param."""
    return {'size': '%sx%s' % (int(self._width), int(self._height))}

  def _GetExtraParams(self, chart):
    """Get any extra params (from extra_params)."""
    return self.extra_params

  def _GetDataSeriesParams(self, chart):
    """Collect params related to the data series."""
    y_min, y_max = chart.GetDependentAxis().min, chart.GetDependentAxis().max
    series_data = []
    markers = []
    for i, series in enumerate(chart.data):
      data = series.data
      if not data:  # Drop empty series.
        continue
      series_data.append(data)

      # Each marker is encoded as shape,color,series-index,point,size.
      for x, marker in series.markers:
        args = [marker.shape, marker.color, i, x, marker.size]
        markers.append(','.join(str(arg) for arg in args))

    encoder = self._GetDataEncoder(chart)
    result = util.EncodeData(chart, series_data, y_min, y_max, encoder)
    result.update(util.JoinLists(marker = markers))
    return result

  def _GetColors(self, chart):
    """Color series color parameter."""
    colors = []
    for series in chart.data:
      # Skip empty series to stay aligned with _GetDataSeriesParams.
      if not series.data:
        continue
      colors.append(series.style.color)
    return util.JoinLists(color = colors)

  def _GetDataEncoder(self, chart):
    """Get a class which can encode the data the way the user requested."""
    if not self.enhanced_encoding:
      return util.SimpleDataEncoder()
    return util.EnhancedDataEncoder()

  def _GetLegendParams(self, chart):
    """Get params for showing a legend."""
    if chart._show_legend:
      return util.JoinLists(data_series_label = chart._legend_labels)
    return {}

  def _GetAxisLabelsAndPositions(self, axis, chart):
    """Return axis.labels & axis.label_positions."""
    # Hook point: subclasses may reorder labels (see BarChartEncoder).
    return axis.labels, axis.label_positions

  def _GetAxisParams(self, chart):
    """Collect params related to our various axes (x, y, right-hand)."""
    axis_types = []
    axis_ranges = []
    axis_labels = []
    axis_label_positions = []
    axis_label_gridlines = []
    # Tick marks longer than the chart extend across the whole plot area;
    # used (negated) to draw gridlines via the chxtc parameter.
    mark_length = max(self._width, self._height)
    for i, axis_pair in enumerate(a for a in chart._GetAxes() if a[1].labels):
      axis_type_code, axis = axis_pair
      axis_types.append(axis_type_code)
      if axis.min is not None or axis.max is not None:
        assert axis.min is not None  # Sanity check: both min & max must be set.
        assert axis.max is not None
        axis_ranges.append('%s,%s,%s' % (i, axis.min, axis.max))

      labels, positions = self._GetAxisLabelsAndPositions(axis, chart)
      if labels:
        # The '<index>:' prefix ties the following labels to axis i.
        axis_labels.append('%s:' % i)
        axis_labels.extend(labels)
      if positions:
        positions = [i] + list(positions)
        axis_label_positions.append(','.join(str(x) for x in positions))
      if axis.label_gridlines:
        axis_label_gridlines.append("%d,%d" % (i, -mark_length))

    return util.JoinLists(axis_type = axis_types,
                          axis_range = axis_ranges,
                          axis_label = axis_labels,
                          axis_position = axis_label_positions,
                          axis_tick_marks = axis_label_gridlines,
                         )

  def _GetGridParams(self, chart):
    """Collect params related to grid lines."""
    x = 0
    y = 0
    if chart.bottom.grid_spacing:
      # min/max must be set for this to make sense.
      assert(chart.bottom.min is not None)
      assert(chart.bottom.max is not None)
      total = float(chart.bottom.max - chart.bottom.min)
      # The chg parameter takes spacing as a percentage of the axis span.
      x = 100 * chart.bottom.grid_spacing / total
    if chart.left.grid_spacing:
      # min/max must be set for this to make sense.
      assert(chart.left.min is not None)
      assert(chart.left.max is not None)
      total = float(chart.left.max - chart.left.min)
      y = 100 * chart.left.grid_spacing / total
    if x or y:
      return dict(grid = '%.3g,%.3g,1,0' % (x, y))
    return {}
+
+
class LineChartEncoder(BaseChartEncoder):

  """Encoder that renders LineChart objects as Google Chart API URLs."""

  def _GetType(self, chart):
    """Line charts use the 'lc' chart type."""
    return dict(chart_type='lc')

  def _GetLineStyles(self, chart):
    """Build the line-style parameter from each series' style."""
    style_specs = []
    for series in chart.data:
      series_style = series.style
      if not series_style:
        # If one style is missing, they must all be missing
        # TODO: Add a test for this; throw a more meaningful exception
        assert not style_specs
      else:
        style_specs.append('%s,%s,%s' % (series_style.width,
                                         series_style.on,
                                         series_style.off))
    return util.JoinLists(line_style=style_specs)

  def _GetFormatters(self):
    """Insert the line-style formatter ahead of extra params and size."""
    formatters = super(LineChartEncoder, self)._GetFormatters()
    formatters.insert(-2, self._GetLineStyles)
    return formatters
+
+
class SparklineEncoder(LineChartEncoder):

  """Encoder that renders Sparkline objects as Google Chart API URLs."""

  def _GetType(self, chart):
    """Sparklines use the 'lfi' (line, no axes) chart type."""
    return dict(chart_type='lfi')
+
+
class BarChartEncoder(BaseChartEncoder):

  """Helper class to encode BarChart objects into Google Chart URLs."""

  __STYLE_DEPRECATION = ('BarChart.display.style is deprecated.' +
                         ' Use BarChart.style, instead.')

  def __init__(self, chart, style=None):
    """Construct a new BarChartEncoder.

    Args:
      chart: The BarChart to encode.
      style: DEPRECATED. Set style on the chart object itself.
    """
    super(BarChartEncoder, self).__init__(chart)
    if style is not None:
      warnings.warn(self.__STYLE_DEPRECATION, DeprecationWarning, stacklevel=2)
      chart.style = style

  def _GetType(self, chart):
    """Return the cht code for the (vertical, stacked) chart flavour."""
    # (vertical?, stacked?) -> Google Chart API bar type code.
    types = {(True, False): 'bvg',
             (True, True): 'bvs',
             (False, False): 'bhg',
             (False, True): 'bhs'}
    return {'chart_type': types[(chart.vertical, chart.stacked)]}

  def _GetAxisLabelsAndPositions(self, axis, chart):
    """Reverse labels on the y-axis in horizontal bar charts.
    (Otherwise the labels come out backwards from what you would expect)
    """
    if not chart.vertical and axis == chart.left:
      # The left axis of horizontal bar charts needs to have reversed labels
      return reversed(axis.labels), reversed(axis.label_positions)
    return axis.labels, axis.label_positions

  def _GetFormatters(self):
    """Add bar-specific formatters ahead of extra params and size."""
    out = super(BarChartEncoder, self)._GetFormatters()
    # insert at -2 to allow extra_params to overwrite everything
    out.insert(-2, self._ZeroPoint)
    out.insert(-2, self._ApplyBarChartStyle)
    return out

  def _ZeroPoint(self, chart):
    """Get the zero-point if any bars are negative."""
    # (Maybe) set the zero point. Use y_min/y_max locals rather than
    # shadowing the min/max builtins as the original code did.
    y_min = chart.GetDependentAxis().min
    y_max = chart.GetDependentAxis().max
    out = {}
    if y_min < 0:
      if y_max < 0:
        out['chp'] = 1
      else:
        out['chp'] = -y_min / float(y_max - y_min)
    return out

  def _ApplyBarChartStyle(self, chart):
    """If bar style is specified, fill in the missing data and apply it."""
    # sanity checks
    if chart.style is None or not chart.data:
      return {}

    (bar_thickness, bar_gap, group_gap) = (chart.style.bar_thickness,
                                           chart.style.bar_gap,
                                           chart.style.group_gap)
    # Auto-size bar/group gaps: derive whichever one is missing from the
    # other (bar gap defaults to half the group gap and vice versa).
    if bar_gap is None and group_gap is not None:
      bar_gap = max(0, group_gap / 2)
      if not chart.style.use_fractional_gap_spacing:
        bar_gap = int(bar_gap)
    if group_gap is None and bar_gap is not None:
      group_gap = max(0, bar_gap * 2)

    # Set bar thickness to auto if it is missing
    if bar_thickness is None:
      if chart.style.use_fractional_gap_spacing:
        bar_thickness = 'r'
      else:
        bar_thickness = 'a'
    else:
      # Convert gap sizes to pixels if needed
      if chart.style.use_fractional_gap_spacing:
        if bar_gap:
          bar_gap = int(bar_thickness * bar_gap)
        if group_gap:
          group_gap = int(bar_thickness * group_gap)

    # Build a valid spec; ignore group gap if chart is stacked,
    # since there are no groups in that case
    spec = [bar_thickness]
    if bar_gap is not None:
      spec.append(bar_gap)
      if group_gap is not None and not chart.stacked:
        spec.append(group_gap)
    return util.JoinLists(bar_size = spec)

  def __GetStyle(self):
    warnings.warn(self.__STYLE_DEPRECATION, DeprecationWarning, stacklevel=2)
    return self.chart.style

  def __SetStyle(self, value):
    warnings.warn(self.__STYLE_DEPRECATION, DeprecationWarning, stacklevel=2)
    self.chart.style = value

  # Bug fix: the deprecation message was previously passed as the third
  # positional argument to property(), which is fdel -- so "del obj.style"
  # tried to *call the string* and raised a confusing TypeError. Pass it as
  # the docstring instead.
  style = property(__GetStyle, __SetStyle, doc=__STYLE_DEPRECATION)
+
+
class PieChartEncoder(BaseChartEncoder):
  """Helper class for encoding PieChart objects into Google Chart URLs.

  Object Attributes:
    is3d: if True, draw a 3d pie chart. Default is False.
    angle: angle of rotation of the pie chart in radians, or None.
  """

  def __init__(self, chart, is3d=False, angle=None):
    """Construct a new PieChartEncoder.

    Args:
      chart: The PieChart to encode.
      is3d: If True, draw a 3d pie chart. Default is False. If the pie chart
        includes multiple pies, is3d must be set to False.
      angle: Angle of rotation of the pie chart, in radians.
    """
    super(PieChartEncoder, self).__init__(chart)
    self.is3d = is3d
    # Bug fix: this previously assigned None unconditionally, silently
    # discarding the caller's angle so _GetAngleParams never emitted 'chp'.
    self.angle = angle

  def _GetFormatters(self):
    """Add a formatter for the chart angle."""
    formatters = super(PieChartEncoder, self)._GetFormatters()
    formatters.append(self._GetAngleParams)
    return formatters

  def _GetType(self, chart):
    """Return the cht code: 'pc' (concentric), 'p3' (3d) or 'p' (flat)."""
    if len(chart.data) > 1:
      if self.is3d:
        # Multiple pies cannot be drawn in 3d; fall back with a warning.
        warnings.warn(
            '3d charts with more than one pie not supported; rendering in 2d',
            RuntimeWarning, stacklevel=2)
      chart_type = 'pc'
    else:
      if self.is3d:
        chart_type = 'p3'
      else:
        chart_type = 'p'
    return {'chart_type': chart_type}

  def _GetDataSeriesParams(self, chart):
    """Collect params related to the data series."""
    pie_points = []
    labels = []
    max_val = 1
    for pie in chart.data:
      points = []
      for segment in pie:
        if segment:
          points.append(segment.size)
          max_val = max(max_val, segment.size)
          labels.append(segment.label or '')
      if points:
        pie_points.append(points)

    encoder = self._GetDataEncoder(chart)
    # Pie segment sizes are always scaled from zero to the largest segment.
    result = util.EncodeData(chart, pie_points, 0, max_val, encoder)
    result.update(util.JoinLists(label=labels))
    return result

  def _GetColors(self, chart):
    """Collect the color parameter for all pie segments."""
    if chart._colors:
      # Colors were overridden by the user
      colors = chart._colors
    else:
      # Build the list of colors from individual segments
      colors = []
      for pie in chart.data:
        for segment in pie:
          if segment and segment.color:
            colors.append(segment.color)
    return util.JoinLists(color = colors)

  def _GetAngleParams(self, chart):
    """If the user specified an angle, add it to the params."""
    if self.angle:
      return {'chp' : str(self.angle)}
    return {}
diff --git a/google/appengine/_internal/graphy/backends/google_chart_api/util.py b/google/appengine/_internal/graphy/backends/google_chart_api/util.py
new file mode 100644
index 0000000..172eaf0
--- /dev/null
+++ b/google/appengine/_internal/graphy/backends/google_chart_api/util.py
@@ -0,0 +1,230 @@
+#!/usr/bin/python2.4
+#
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utility functions for working with the Google Chart API.
+
+Not intended for end users, use the methods in __init__ instead."""
+
+import cgi
+import string
+import urllib
+
+
# TODO: Find a better representation
# Maps human-readable parameter names to their Google Chart API URL codes.
LONG_NAMES = dict(
    client_id='chc',
    size='chs',
    chart_type='cht',
    axis_type='chxt',
    axis_label='chxl',
    axis_position='chxp',
    axis_range='chxr',
    axis_style='chxs',
    data='chd',
    label='chl',
    y_label='chly',
    data_label='chld',
    data_series_label='chdl',
    color='chco',
    extra='chp',
    right_label='chlr',
    label_position='chlp',
    y_label_position='chlyp',
    right_label_position='chlrp',
    grid='chg',
    axis='chx',
    # This undocumented parameter specifies the length of the tick marks for an
    # axis. Negative values will extend tick marks into the main graph area.
    axis_tick_marks='chxtc',
    line_style='chls',
    marker='chm',
    fill='chf',
    bar_size='chbh',
    bar_height='chbh',
    label_color='chlc',
    signature='sig',
    output_format='chof',
    title='chtt',
    title_style='chts',
    callback='callback',
    )

# Delimiters used for parameters whose values are built by joining a list.
# (This used to be a bare string literal, which is a no-op statement, not a
# comment; a '#' comment documents the intent without executing anything.)
JOIN_DELIMS = dict(
    data=',',
    color=',',
    line_style='|',
    marker='|',
    axis_type=',',
    axis_range='|',
    axis_label='|',
    axis_position='|',
    axis_tick_marks='|',
    data_series_label='|',
    label='|',
    bar_size=',',
    bar_height=',',
)
+
+
class SimpleDataEncoder:

  """Encode data using the Chart API 'simple' encoding (one character per
  value). None and out-of-range values are dropped (encoded as '_').
  """

  def __init__(self):
    self.prefix = 's:'
    # 62 levels: A-Z, a-z, 0-9, in that order.
    self.code = string.ascii_uppercase + string.ascii_lowercase + string.digits
    self.min = 0
    self.max = len(self.code) - 1

  def Encode(self, data):
    """Encode a sequence of numbers, one character each."""
    encoded = [self._EncodeItem(value) for value in data]
    return ''.join(encoded)

  def _EncodeItem(self, value):
    """Encode a single value; None or out-of-range becomes '_'."""
    if value is None:
      return '_'
    rounded = int(round(value))
    if self.min <= rounded <= self.max:
      return self.code[rounded]
    return '_'
+
+
class EnhancedDataEncoder:

  """Encode data using the Chart API 'extended' encoding (two characters per
  value, 64 * 64 = 4096 levels). None and out-of-range values are dropped
  (encoded as '__').
  """

  def __init__(self):
    self.prefix = 'e:'
    chars = string.ascii_uppercase + string.ascii_lowercase + string.digits + '-.'
    # All two-character combinations in order: 'AA', 'AB', ..., '..'
    self.code = [x + y for x in chars for y in chars]
    self.min = 0
    self.max = len(self.code) - 1

  def Encode(self, data):
    """Encode a sequence of numbers, two characters each."""
    encoded = [self._EncodeItem(value) for value in data]
    return ''.join(encoded)

  def _EncodeItem(self, value):
    """Encode a single value; None or out-of-range becomes '__'."""
    if value is None:
      return '__'
    rounded = int(round(value))
    if self.min <= rounded <= self.max:
      return self.code[rounded]
    return '__'
+
+
def EncodeUrl(base, params, escape_url, use_html_entities):
  """Escape params, combine and append them to base to generate a full URL.

  Args:
    base: base URL (no query string).
    params: dict of query parameter name -> value.
    escape_url: if true, URL-quote each value before joining.
    use_html_entities: if true, HTML-escape the final URL (for embedding in
        HTML attributes).

  Returns:
    The assembled URL string.
  """
  # NOTE(review): relies on Python 2-only APIs (dict.iteritems, urllib.quote,
  # cgi.escape); fine for this py2 SDK but not portable to Python 3 as-is.
  real_params = []
  for key, value in params.iteritems():
    if escape_url:
      value = urllib.quote(value)
    # Parameters with empty values are dropped entirely. Note the emptiness
    # check happens after quoting, so only an originally-empty value is empty.
    if value:
      real_params.append('%s=%s' % (key, value))
  if real_params:
    url = '%s?%s' % (base, '&'.join(real_params))
  else:
    url = base
  if use_html_entities:
    url = cgi.escape(url, quote=True)
  return url
+
+
def ShortenParameterNames(params):
  """Shorten long parameter names (like size) to short names (like chs).

  Args:
    params: dict mapping parameter names (long or short) to their values.

  Returns:
    A new dict keyed exclusively by short names.

  Raises:
    KeyError: if the same parameter appears under both its long and short
        name, since it is ambiguous which value should win.
  """
  out = {}
  # items() instead of the Python 2-only iteritems(): identical behavior on
  # Python 2, and portable to Python 3.
  for name, value in params.items():
    short_name = LONG_NAMES.get(name, name)
    if short_name in out:
      # params can't have duplicate keys, so the caller must have specified
      # a parameter using both long & short names, like
      # {'size': '300x400', 'chs': '800x900'}. We don't know which to use.
      raise KeyError('Both long and short version of parameter %s (%s) '
                     'found. It is unclear which one to use.' % (name, short_name))
    out[short_name] = value
  return out
+
+
def StrJoin(delim, data):
  """Convert every element of data to a string, then join with delim."""
  return delim.join([str(item) for item in data])
+
+
def JoinLists(**args):
  """Take a dictionary of {long_name:values}, and join the values.

  For each long_name, the values are joined into one string using the
  delimiter registered for that name in JOIN_DELIMS. Empty or None value
  lists become the empty string.

  Returns:
    A dictionary of {long_name:joined_value} entries.
  """
  joined = {}
  for name, values in args.items():
    if not values:
      joined[name] = ''
    else:
      joined[name] = StrJoin(JOIN_DELIMS[name], values)
  return joined
+
+
def EncodeData(chart, series, y_min, y_max, encoder):
  """Format the given data series in plain or extended format.

  Use the chart's encoder to determine the format. The formatted data will
  be scaled to fit within the range of values supported by the chosen
  encoding.

  Args:
    chart: The chart.
    series: A list of the data series to format; each list element is
            a list of data points.
    y_min: Minimum data value. May be None if y_max is also None
    y_max: Maximum data value. May be None if y_min is also None
    encoder: The data encoder (e.g. SimpleDataEncoder or EnhancedDataEncoder)
             whose min/max range and Encode method are used.
  Returns:
    A dictionary with one key, 'data', whose value is the fully encoded series.
  """
  # y_min and y_max must be given together (or omitted together).
  assert (y_min is None) == (y_max is None)
  if y_min is not None:
    def _ScaleAndEncode(series):
      # Map [y_min, y_max] onto the encoder's own integer range first.
      series = ScaleData(series, y_min, y_max, encoder.min, encoder.max)
      return encoder.Encode(series)
    encoded_series = [_ScaleAndEncode(s) for s in series]
  else:
    encoded_series = [encoder.Encode(s) for s in series]
  # JoinLists glues the per-series strings with the 'data' delimiter (',').
  result = JoinLists(**{'data': encoded_series})
  result['data'] = encoder.prefix + result['data']
  return result
+
+
def ScaleData(data, old_min, old_max, new_min, new_max):
  """Linearly map data so that [old_min, old_max] maps onto
  [new_min, new_max].

  None values pass through unchanged. A degenerate input range
  (old_min == old_max) is translated but not scaled.
  """
  if old_min == old_max:
    # Avoid dividing by zero; just translate the points.
    scale = 1
  else:
    scale = (new_max - new_min) / float(old_max - old_min)
  translate = new_min - scale * old_min

  def _Rescale(value):
    if value is None:
      return None
    return scale * value + translate

  return map(_Rescale, data)
diff --git a/google/appengine/_internal/graphy/bar_chart.py b/google/appengine/_internal/graphy/bar_chart.py
new file mode 100644
index 0000000..136c1a8
--- /dev/null
+++ b/google/appengine/_internal/graphy/bar_chart.py
@@ -0,0 +1,171 @@
+#!/usr/bin/python2.4
+#
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Code related to bar charts."""
+
+import copy
+import warnings
+
+from google.appengine._internal.graphy import common
+from google.appengine._internal.graphy import util
+
+
class BarsStyle(object):
  """Style of a series of bars in a BarChart.

  Object Attributes:
    color: Hex string, like '00ff00' for green
  """
  def __init__(self, color):
    # color may be None; the AutoColor formatter fills in missing colors.
    self.color = color
+
+
class BarChartStyle(object):
  """Represents the style for bars on a BarChart.

  Any of the object attributes may be set to None, in which case the
  value will be auto-calculated.

  Object Attributes:
    bar_thickness: The thickness of a bar, in pixels.
    bar_gap: The gap between bars, in pixels, or as a fraction of bar thickness
      if use_fractional_gap_spacing is True.
    group_gap: The gap between groups of bars, in pixels, or as a fraction of
      bar thickness if use_fractional_gap_spacing is True.
    use_fractional_gap_spacing: if True, bar_gap and group_gap specify gap
      sizes as a fraction of bar width. Default is False.
  """

  # Default gap sizes, in pixels.
  _DEFAULT_GROUP_GAP = 8
  _DEFAULT_BAR_GAP = 4

  def __init__(self, bar_thickness=None,
               bar_gap=_DEFAULT_BAR_GAP, group_gap=_DEFAULT_GROUP_GAP,
               use_fractional_gap_spacing=False):
    """Create a new BarChartStyle.

    Args:
      bar_thickness: The thickness of a bar, in pixels. Set this to None if
        you want the bar thickness to be auto-calculated (this is the default
        behaviour).
      bar_gap: The gap between bars, in pixels. Default is 4.
      group_gap: The gap between groups of bars, in pixels. Default is 8.
      use_fractional_gap_spacing: if True, treat bar_gap and group_gap as
        fractions of the bar thickness rather than pixels. Default is False.
    """
    self.bar_thickness = bar_thickness
    self.bar_gap = bar_gap
    self.group_gap = group_gap
    self.use_fractional_gap_spacing = use_fractional_gap_spacing
+
+
class BarStyle(BarChartStyle):
  # Deprecated alias for BarChartStyle, kept for backward compatibility.

  def __init__(self, *args, **kwargs):
    """Construct a BarChartStyle, warning that this old name is deprecated."""
    warnings.warn('BarStyle is deprecated.  Use BarChartStyle.',
                  DeprecationWarning, stacklevel=2)
    super(BarStyle, self).__init__(*args, **kwargs)
+
+
class BarChart(common.BaseChart):
  """Represents a bar chart.

  Object attributes:
    vertical: if True, the bars will be vertical. Default is True.
    stacked: if True, the bars will be stacked. Default is False.
    style: The BarChartStyle for all bars on this chart, specifying bar
      thickness and gaps between bars.
  """

  def __init__(self, points=None):
    """Constructor for BarChart objects.

    Args:
      points: optional list of y-values; if given, an initial series of bars
        is added via AddBars.
    """
    super(BarChart, self).__init__()
    if points is not None:
      self.AddBars(points)
    self.vertical = True
    self.stacked = False
    self.style = BarChartStyle(None, None, None)  # full auto

  def AddBars(self, points, label=None, color=None):
    """Add a series of bars to the chart.

    Args:
      points: List of y-values for the bars in this series
      label: Name of the series (used in the legend)
      color: Hex string, like '00ff00' for green

    This is a convenience method which constructs & appends the DataSeries for
    you.

    Returns:
      The newly created DataSeries.
    """
    if label is not None and util._IsColor(label):
      # Guard against call sites still using the old (color, label) order.
      warnings.warn('Your code may be broken! '
                    'Label is a hex triplet.  Maybe it is a color? The '
                    'old argument order (color before label) is deprecated.',
                    DeprecationWarning, stacklevel=2)
    style = BarsStyle(color)
    series = common.DataSeries(points, label=label, style=style)
    self.data.append(series)
    return series

  def GetDependentAxes(self):
    """Get the dependent axes, which depend on orientation."""
    if self.vertical:
      return (self._axes[common.AxisPosition.LEFT] +
              self._axes[common.AxisPosition.RIGHT])
    else:
      return (self._axes[common.AxisPosition.TOP] +
              self._axes[common.AxisPosition.BOTTOM])

  def GetIndependentAxes(self):
    """Get the independent axes, which depend on orientation."""
    if self.vertical:
      return (self._axes[common.AxisPosition.TOP] +
              self._axes[common.AxisPosition.BOTTOM])
    else:
      return (self._axes[common.AxisPosition.LEFT] +
              self._axes[common.AxisPosition.RIGHT])

  def GetDependentAxis(self):
    """Get the main dependent axis, which depends on orientation."""
    if self.vertical:
      return self.left
    else:
      return self.bottom

  def GetIndependentAxis(self):
    """Get the main independent axis, which depends on orientation."""
    if self.vertical:
      return self.bottom
    else:
      return self.left

  def GetMinMaxValues(self):
    """Get the largest & smallest bar values as (min_value, max_value).

    For stacked charts, positive and negative contributions are summed
    separately per bar position before taking the min/max.
    """
    if not self.stacked:
      return super(BarChart, self).GetMinMaxValues()

    if not self.data:
      return None, None  # No data, nothing to do.
    num_bars = max(len(series.data) for series in self.data)
    # [0] * num_bars replaces the Python 2-only "[0 for i in xrange(...)]";
    # same result, portable and clearer.
    positives = [0] * num_bars
    negatives = list(positives)
    for series in self.data:
      for i, point in enumerate(series.data):
        if point:
          if point > 0:
            positives[i] += point
          else:
            negatives[i] += point
    min_value = min(min(positives), min(negatives))
    max_value = max(max(positives), max(negatives))
    return min_value, max_value
diff --git a/google/appengine/_internal/graphy/common.py b/google/appengine/_internal/graphy/common.py
new file mode 100644
index 0000000..e4f6bb5
--- /dev/null
+++ b/google/appengine/_internal/graphy/common.py
@@ -0,0 +1,412 @@
+#!/usr/bin/python2.4
+#
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Code common to all chart types."""
+
+import copy
+import warnings
+
+from google.appengine._internal.graphy import formatters
+from google.appengine._internal.graphy import util
+
+
class Marker(object):

  """Represents an abstract marker, without position. You can attach these to
  a DataSeries.

  Object attributes:
    shape: One of the shape codes (Marker.arrow, Marker.diamond, etc.)
    color: color (as hex string, f.ex. '0000ff' for blue)
    size: size of the marker
  """
  # TODO: Write an example using markers.

  # Shape codes (single characters passed through to the backend).
  arrow = 'a'
  cross = 'c'
  diamond = 'd'
  circle = 'o'
  square = 's'
  x = 'x'

  # Note: The Google Chart API also knows some other markers ('v', 'V', 'r',
  # 'b') that I think would fit better into a grid API.
  # TODO: Make such a grid API

  def __init__(self, shape, color, size):
    """Construct a Marker.  See class docstring for details on args."""
    # TODO: Shapes 'r' and 'b' would be much easier to use if they had a
    # special-purpose API (instead of trying to fake it with markers)
    self.shape = shape
    self.color = color
    self.size = size
+
+
class _BasicStyle(object):
  """Basic style object. Used internally."""

  def __init__(self, color):
    # color: hex string like '0000ff', or None (AutoColor fills in Nones).
    self.color = color
+
+
class DataSeries(object):

  """Represents one data series for a chart (both data & presentation
  information).

  Object attributes:
    points: List of numbers representing y-values (x-values are not specified
            because the Google Chart API expects even x-value spacing).
    label: String with the series' label in the legend.  The chart will only
           have a legend if at least one series has a label.  If some series
           do not have a label then they will have an empty description in
           the legend.  This is currently a limitation in the Google Chart
           API.
    style: A chart-type-specific style object.  (LineStyle for LineChart,
           BarsStyle for BarChart, etc.)
    markers: List of (x, m) tuples where m is a Marker object and x is the
             x-axis value to place it at.

             The "fill" markers ('r' & 'b') are a little weird because they
             aren't a point on a line.  For these, you can fake it by
             passing slightly weird data (I'd like a better API for them at
             some point):
             For 'b', you attach the marker to the starting series, and set x
             to the index of the ending line.  Size is ignored, I think.

             For 'r', you can attach to any line, specify the starting
             y-value for x and the ending y-value for size.  Y, in this case,
             is between 0.0 (bottom) and 1.0 (top).
    color: DEPRECATED
  """

  # TODO: Should we require the points list to be non-empty ?
  # TODO: Do markers belong here?  They are really only used for LineCharts
  def __init__(self, points, label=None, style=None, markers=None, color=None):
    """Construct a DataSeries.  See class docstring for details on args."""
    # Legacy-argument detection: older call sites passed color/style before
    # label, so a label that looks like a hex triplet is probably a color.
    if label is not None and util._IsColor(label):
      warnings.warn('Your code may be broken! Label is a hex triplet.  Maybe '
                    'it is a color? The old argument order (color & style '
                    'before label) is deprecated.', DeprecationWarning,
                    stacklevel=2)
    if color is not None:
      warnings.warn('Passing color is deprecated.  Pass a style object '
                    'instead.', DeprecationWarning, stacklevel=2)
      # Attempt to fix it for them.  If they also passed a style, honor it.
      if style is None:
        style = _BasicStyle(color)
    # NOTE(review): basestring is Python 2-only; this module targets py2.
    if style is not None and isinstance(style, basestring):
      warnings.warn('Your code is broken! Style is a string, not an object. '
                    'Maybe you are passing a color?  Passing color is '
                    'deprecated; pass a style object instead.',
                    DeprecationWarning, stacklevel=2)
    if style is None:
      style = _BasicStyle(None)
    self.data = points
    self.style = style
    self.markers = markers or []
    self.label = label

  def _GetColor(self):
    warnings.warn('DataSeries.color is deprecated, use '
                  'DataSeries.style.color instead.', DeprecationWarning,
                  stacklevel=2)
    return self.style.color

  def _SetColor(self, color):
    warnings.warn('DataSeries.color is deprecated, use '
                  'DataSeries.style.color instead.', DeprecationWarning,
                  stacklevel=2)
    self.style.color = color

  # Deprecated accessor kept so old code reading/writing .color still works.
  color = property(_GetColor, _SetColor)
+
+
class AxisPosition(object):
  """Represents all the available axis positions.

  The available positions are as follows:
    AxisPosition.TOP
    AxisPosition.BOTTOM
    AxisPosition.LEFT
    AxisPosition.RIGHT
  """
  # Single-letter codes; BaseChart._POSITION_CODES ('yrxt') relies on these
  # exact values for its canonical ordering.
  LEFT = 'y'
  RIGHT = 'r'
  BOTTOM = 'x'
  TOP = 't'
+
+
class Axis(object):

  """Represents one axis.

  Object settings:
    min: Minimum value for the bottom or left end of the axis
    max: Max value.
    labels: List of labels to show along the axis.
    label_positions: List of positions to show the labels at.  Uses the scale
                     set by min & max, so if you set min = 0 and max = 10, then
                     label positions [0, 5, 10] would be at the bottom,
                     middle, and top of the axis, respectively.
    grid_spacing: Amount of space between gridlines (in min/max scale).
                  A value of 0 disables gridlines.
    label_gridlines: If True, draw a line extending from each label
                     on the axis all the way across the chart.
  """

  def __init__(self, axis_min=None, axis_max=None):
    """Construct a new Axis.

    Args:
      axis_min: smallest value on the axis (None means auto-scaled later,
                e.g. by the AutoScale formatter)
      axis_max: largest value on the axis (None means auto-scaled later)
    """
    self.min = axis_min
    self.max = axis_max
    self.labels = []
    self.label_positions = []
    self.grid_spacing = 0
    self.label_gridlines = False
+
+# TODO: Add other chart types. Order of preference:
+# - scatter plots
+# - us/world maps
+
class BaseChart(object):
  """Base chart object with standard behavior for all other charts.

  Object attributes:
    data: List of DataSeries objects. Chart subtypes provide convenience
          functions (like AddLine, AddBars, AddSegment) to add more series
          later.
    left/right/bottom/top: Axis objects for the 4 different axes.
    formatters: A list of callables which will be used to format this chart for
                display.  TODO: Need better documentation for how these
                work.
    auto_scale, auto_color, auto_legend:
      These aliases let users access the default formatters without poking
      around in self.formatters.  If the user removes them from
      self.formatters then they will no longer be enabled, even though they'll
      still be accessible through the aliases.  Similarly, re-assigning the
      aliases has no effect on the contents of self.formatters.
    display: This variable is reserved for backends to populate with a display
             object.  The intention is that the display object would be used to
             render this chart.  The details of what gets put here depends on
             the specific backend you are using.
  """

  # Canonical ordering of position keys
  _POSITION_CODES = 'yrxt'

  # TODO: Add more inline args to __init__ (esp. labels).
  # TODO: Support multiple series in the constructor, if given.
  def __init__(self):
    """Construct a BaseChart object."""
    self.data = []

    self._axes = {}
    for code in self._POSITION_CODES:
      self._axes[code] = [Axis()]
    self._legend_labels = []  # AutoLegend fills this out
    self._show_legend = False  # AutoLegend fills this out

    # Aliases for default formatters
    self.auto_color = formatters.AutoColor()
    self.auto_scale = formatters.AutoScale()
    self.auto_legend = formatters.AutoLegend
    self.formatters = [self.auto_color, self.auto_scale, self.auto_legend]
    # display is used to convert the chart into something displayable (like a
    # url or img tag).
    self.display = None

  def AddFormatter(self, formatter):
    """Add a new formatter to the chart (convenience method)."""
    self.formatters.append(formatter)

  def AddSeries(self, points, color=None, style=None, markers=None,
                label=None):
    """DEPRECATED

    Add a new series of data to the chart; return the DataSeries object."""
    warnings.warn('AddSeries is deprecated.  Instead, call AddLine for '
                  'LineCharts, AddBars for BarCharts, AddSegment for '
                  'PieCharts ', DeprecationWarning, stacklevel=2)
    series = DataSeries(points, color=color, style=style, markers=markers,
                        label=label)
    self.data.append(series)
    return series

  def GetDependentAxes(self):
    """Return any dependent axes ('left' and 'right' by default for LineCharts,
    although bar charts would use 'bottom' and 'top').
    """
    return self._axes[AxisPosition.LEFT] + self._axes[AxisPosition.RIGHT]

  def GetIndependentAxes(self):
    """Return any independent axes (normally top & bottom, although horizontal
    bar charts use left & right by default).
    """
    return self._axes[AxisPosition.TOP] + self._axes[AxisPosition.BOTTOM]

  def GetDependentAxis(self):
    """Return this chart's main dependent axis (often 'left', but
    horizontal bar-charts use 'bottom').
    """
    return self.left

  def GetIndependentAxis(self):
    """Return this chart's main independent axis (often 'bottom', but
    horizontal bar-charts use 'left').
    """
    return self.bottom

  def _Clone(self):
    """Make a deep copy of this chart.

    Formatters & display will be missing from the copy, due to limitations in
    deepcopy.
    """
    orig_values = {}
    # Things which deepcopy will likely choke on if it tries to copy.
    uncopyables = ['formatters', 'display', 'auto_color', 'auto_scale',
                   'auto_legend']
    for name in uncopyables:
      orig_values[name] = getattr(self, name)
      setattr(self, name, None)
    clone = copy.deepcopy(self)
    # items() rather than the Python 2-only iteritems(): identical behavior
    # on py2, portable to py3.
    for name, orig_value in orig_values.items():
      setattr(self, name, orig_value)
    return clone

  def GetFormattedChart(self):
    """Get a copy of the chart with formatting applied."""
    # Formatters need to mutate the chart, but we don't want to change it out
    # from under the user.  So, we work on a copy of the chart.
    scratchpad = self._Clone()
    for formatter in self.formatters:
      formatter(scratchpad)
    return scratchpad

  def GetMinMaxValues(self):
    """Get the largest & smallest values in this chart, returned as
    (min_value, max_value).  Takes into account complications like stacked
    data series.

    For example, with non-stacked series, a chart with [1, 2, 3] and [4, 5, 6]
    would return (1, 6).  If the same chart was stacking the data series, it
    would return (5, 9).
    """
    # NOTE(review): a non-empty series containing only None values would make
    # these min()/max() calls raise ValueError — presumably never happens in
    # practice; confirm before hardening.
    MinPoint = lambda data: min(x for x in data if x is not None)
    MaxPoint = lambda data: max(x for x in data if x is not None)
    mins = [MinPoint(series.data) for series in self.data if series.data]
    maxes = [MaxPoint(series.data) for series in self.data if series.data]
    if not mins or not maxes:
      return None, None  # No data, just bail.
    return min(mins), max(maxes)

  def AddAxis(self, position, axis):
    """Add an axis to this chart in the given position.

    Args:
      position: an AxisPosition object specifying the axis's position
      axis: The axis to add, an Axis object
    Returns:
      the value of the axis parameter
    """
    self._axes.setdefault(position, []).append(axis)
    return axis

  def GetAxis(self, position):
    """Get or create the first available axis in the given position.

    This is a helper method for the left, right, top, and bottom properties.
    If the specified axis does not exist, it will be created.

    Args:
      position: the position to search for
    Returns:
      The first axis in the given position
    """
    # Not using setdefault here just in case, to avoid calling the Axis()
    # constructor needlessly
    if position in self._axes:
      return self._axes[position][0]
    else:
      axis = Axis()
      self._axes[position] = [axis]
      return axis

  def SetAxis(self, position, axis):
    """Set the first axis in the given position to the given value.

    This is a helper method for the left, right, top, and bottom properties.

    Args:
      position: an AxisPosition object specifying the axis's position
      axis: The axis to set, an Axis object
    Returns:
      the value of the axis parameter
    """
    self._axes.setdefault(position, [None])[0] = axis
    return axis

  def _GetAxes(self):
    """Return a generator of (position_code, Axis) tuples for this chart's axes.

    The axes will be sorted by position using the canonical ordering sequence,
    _POSITION_CODES.
    """
    for code in self._POSITION_CODES:
      for axis in self._axes.get(code, []):
        yield (code, axis)

  def _GetBottom(self):
    return self.GetAxis(AxisPosition.BOTTOM)

  def _SetBottom(self, value):
    self.SetAxis(AxisPosition.BOTTOM, value)

  bottom = property(_GetBottom, _SetBottom,
                    doc="""Get or set the bottom axis""")

  def _GetLeft(self):
    return self.GetAxis(AxisPosition.LEFT)

  def _SetLeft(self, value):
    self.SetAxis(AxisPosition.LEFT, value)

  left = property(_GetLeft, _SetLeft,
                  doc="""Get or set the left axis""")

  def _GetRight(self):
    return self.GetAxis(AxisPosition.RIGHT)

  def _SetRight(self, value):
    self.SetAxis(AxisPosition.RIGHT, value)

  right = property(_GetRight, _SetRight,
                   doc="""Get or set the right axis""")

  def _GetTop(self):
    return self.GetAxis(AxisPosition.TOP)

  def _SetTop(self, value):
    self.SetAxis(AxisPosition.TOP, value)

  top = property(_GetTop, _SetTop,
                 doc="""Get or set the top axis""")
diff --git a/google/appengine/_internal/graphy/formatters.py b/google/appengine/_internal/graphy/formatters.py
new file mode 100644
index 0000000..b2991b1
--- /dev/null
+++ b/google/appengine/_internal/graphy/formatters.py
@@ -0,0 +1,192 @@
+#!/usr/bin/python2.4
+#
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module contains various formatters which can help format a chart
+object. To use these, add them to your chart's list of formatters. For
+example:
+ chart.formatters.append(InlineLegend)
+ chart.formatters.append(LabelSeparator(right=8))
+
+Feel free to write your own formatter. Formatters are just callables that
+modify the chart in some (hopefully useful) way. For example, the AutoColor
+formatter makes sure each DataSeries has a color applied to it. The formatter
+should take the chart to format as its only argument.
+
+(The formatters work on a deepcopy of the user's chart, so modifications
+shouldn't leak back into the user's original chart)
+"""
+
def AutoLegend(chart):
  """Fill out the legend from the series labels.

  The legend is shown only if at least one series has a non-None label;
  series without labels get an empty legend entry.
  """
  has_label = False
  entries = []
  for series in chart.data:
    label = series.label
    if label is None:
      entries.append('')
    else:
      entries.append(label)
      has_label = True
  chart._show_legend = has_label
  if has_label:
    chart._legend_labels = entries
+
+
class AutoColor(object):
  """Automatically add colors to any series without colors.

  Object attributes:
    colors: The list of colors (hex strings) to cycle through.  You can modify
            this list if you don't like the default colors.
  """
  def __init__(self):
    # TODO: Add a few more default colors.
    # TODO: Add default styles too, so unstyled series get a unique
    #       color & style combination.
    self.colors = ['0000ff', 'ff0000', '00dd00', '000000']

  def __call__(self, chart):
    """Assign the next palette color to every series lacking one."""
    assigned = 0
    for series in chart.data:
      if series.style.color is None:
        # Wrap around when the palette is exhausted.
        series.style.color = self.colors[assigned % len(self.colors)]
        assigned += 1
+
+
class AutoScale(object):
  """If you don't set min/max on the dependent axes, this fills them in
  automatically by calculating min/max dynamically from the data.

  You can set just min or just max and this formatter will fill in the other
  value for you automatically.  For example, if you only set min then this
  will set max automatically, but leave min untouched.

  Charts can have multiple dependent axes (chart.left & chart.right, for
  example.)  If you set min/max on some axes but not others, this formatter
  copies your min/max to the un-set axes.  For example, if you set up min/max
  on only the right axis then your values will be automatically copied to the
  left axis.  (if you use different min/max values for different axes, the
  precedence is undefined.  So don't do that.)
  """

  def __init__(self, buffer=0.05):
    """Create a new AutoScale formatter.

    Args:
      buffer: fraction of extra space to allocate around the chart's axes.
    """
    self.buffer = buffer

  def __call__(self, chart):
    """Format the chart by setting the min/max values on its dependent axis."""
    if not chart.data:
      return  # Nothing to do.
    min_value, max_value = chart.GetMinMaxValues()
    if min_value is None or max_value is None:
      return  # No data.  Nothing to do.

    # Any user-specified bound overrides the computed one (last axis wins).
    for axis in chart.GetDependentAxes():
      if axis.min is not None:
        min_value = axis.min
      if axis.max is not None:
        max_value = axis.max

    # Pad so data points stay away from the chart's edge.
    padding = (max_value - min_value) * self.buffer

    for axis in chart.GetDependentAxes():
      if axis.min is None:
        axis.min = min_value - padding
      if axis.max is None:
        axis.max = max_value + padding
+
+
class LabelSeparator(object):

  """Adjust the label positions to avoid having them overlap.  This happens for
  any axis with minimum_label_spacing set.
  """

  def __init__(self, left=None, right=None, bottom=None):
    # Each attribute is the minimum spacing (in axis units) for that axis,
    # or None to leave that axis's labels untouched.
    self.left = left
    self.right = right
    self.bottom = bottom

  def __call__(self, chart):
    self.AdjustLabels(chart.left, self.left)
    self.AdjustLabels(chart.right, self.right)
    self.AdjustLabels(chart.bottom, self.bottom)

  def AdjustLabels(self, axis, minimum_label_spacing):
    """Spread axis labels apart so adjacent ones are at least
    minimum_label_spacing apart, clamping to the axis min/max when known.
    """
    if minimum_label_spacing is None:
      return
    if len(axis.labels) <= 1:  # Nothing to adjust
      return
    if axis.max is not None and axis.min is not None:
      # Find the spacing required to fit all labels evenly.
      # Don't try to push them farther apart than that.
      maximum_possible_spacing = (axis.max - axis.min) / (len(axis.labels) - 1)
      if minimum_label_spacing > maximum_possible_spacing:
        minimum_label_spacing = maximum_possible_spacing

    # Pair up positions and labels, highest position first.
    labels = [list(x) for x in zip(axis.label_positions, axis.labels)]
    labels = sorted(labels, reverse=True)

    # First pass from the top, moving colliding labels downward
    for i in range(1, len(labels)):
      if labels[i - 1][0] - labels[i][0] < minimum_label_spacing:
        new_position = labels[i - 1][0] - minimum_label_spacing
        if axis.min is not None and new_position < axis.min:
          new_position = axis.min
        labels[i][0] = new_position

    # Second pass from the bottom, moving colliding labels upward
    for i in range(len(labels) - 2, -1, -1):
      if labels[i][0] - labels[i + 1][0] < minimum_label_spacing:
        new_position = labels[i + 1][0] + minimum_label_spacing
        if axis.max is not None and new_position > axis.max:
          new_position = axis.max
        labels[i][0] = new_position

    # Separate positions and labels
    label_positions, labels = zip(*labels)
    axis.labels = labels
    axis.label_positions = label_positions
+
+
def InlineLegend(chart):
  """Provide a legend for line charts by attaching labels to the right
  end of each line.  Suppresses the regular legend.
  """
  any_labeled = False
  labels = []
  label_positions = []
  for series in chart.data:
    if series.label is None:
      labels.append('')
    else:
      labels.append(series.label)
      any_labeled = True
    # Anchor each label at the final y-value of its line.
    label_positions.append(series.data[-1])

  if any_labeled:
    # Mirror the left axis scale so label positions line up with the data.
    chart.right.min = chart.left.min
    chart.right.max = chart.left.max
    chart.right.labels = labels
    chart.right.label_positions = label_positions
    chart._show_legend = False  # Suppress the regular legend.
diff --git a/google/appengine/_internal/graphy/line_chart.py b/google/appengine/_internal/graphy/line_chart.py
new file mode 100644
index 0000000..d7504f0
--- /dev/null
+++ b/google/appengine/_internal/graphy/line_chart.py
@@ -0,0 +1,122 @@
+#!/usr/bin/python2.4
+#
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Code related to line charts."""
+
+import copy
+import warnings
+
+from google.appengine._internal.graphy import common
+
+
+class LineStyle(object):
+
+  """Represents the style for a line on a line chart. Also provides some
+  convenient presets.
+
+  Object attributes (Passed directly to the Google Chart API. Check there for
+  details):
+    width: Width of the line
+    on: Length of a line segment (for dashed/dotted lines)
+    off: Length of a break (for dashed/dotted lines)
+    color: Color of the line. A hex string, like 'ff0000' for red. Optional,
+      AutoColor will fill this in for you automatically if empty.
+
+  Some common styles, such as LineStyle.dashed, are available:
+    solid
+    dashed
+    dotted
+    thick_solid
+    thick_dashed
+    thick_dotted
+  """
+
+  # Widths (in pixels, per the Chart API's chls parameter conventions).
+  THIN = 1
+  THICK = 2
+
+  # Patterns
+  # ((on, off) tuples, as passed to LineChart.AddLine)
+  SOLID = (1, 0)
+  DASHED = (8, 4)
+  DOTTED = (2, 4)
+
+  def __init__(self, width, on, off, color=None):
+    """Construct a LineStyle. See class docstring for details on args."""
+    self.width = width
+    self.on = on
+    self.off = off
+    self.color = color
+
+
+# Preset styles advertised in the class docstring, built from the
+# width/pattern constants above (color left to AutoColor).
+LineStyle.solid = LineStyle(1, 1, 0)
+LineStyle.dashed = LineStyle(1, 8, 4)
+LineStyle.dotted = LineStyle(1, 2, 4)
+LineStyle.thick_solid = LineStyle(2, 1, 0)
+LineStyle.thick_dashed = LineStyle(2, 8, 4)
+LineStyle.thick_dotted = LineStyle(2, 2, 4)
+
+
+class LineChart(common.BaseChart):
+
+  """Represents a line chart."""
+
+  def __init__(self, points=None):
+    """Create a line chart.
+
+    Args:
+      points: Optional list of y-values; if given, an initial line is
+        added via AddLine.
+    """
+    super(LineChart, self).__init__()
+    if points is not None:
+      self.AddLine(points)
+
+  def AddLine(self, points, label=None, color=None,
+              pattern=LineStyle.SOLID, width=LineStyle.THIN, markers=None):
+    """Add a new line to the chart.
+
+    This is a convenience method which constructs the DataSeries and appends it
+    for you. It returns the new series.
+
+    Args:
+      points: List of equally-spaced y-values for the line
+      label: Name of the line (used for the legend)
+      color: Hex string, like 'ff0000' for red
+      pattern: Tuple for (length of segment, length of gap). i.e.
+        LineStyle.DASHED
+      width: Width of the line (i.e. LineStyle.THIN)
+      markers: List of Marker objects to attach to this line (see DataSeries
+        for more info)
+
+    Returns:
+      The newly created DataSeries.
+    """
+    # Detect the deprecated argument order (markers used to come before
+    # color): a list of Markers in the color slot means an old-style caller.
+    if color is not None and isinstance(color[0], common.Marker):
+      warnings.warn('Your code may be broken! '
+                    'You passed a list of Markers instead of a color. The '
+                    'old argument order (markers before color) is deprecated.',
+                    DeprecationWarning, stacklevel=2)
+    style = LineStyle(width, pattern[0], pattern[1], color=color)
+    series = common.DataSeries(points, label=label, style=style,
+                               markers=markers)
+    self.data.append(series)
+    return series
+
+  def AddSeries(self, points, color=None, style=LineStyle.solid, markers=None,
+                label=None):
+    """DEPRECATED. Use AddLine instead; this forwards to it."""
+    warnings.warn('LineChart.AddSeries is deprecated. Call AddLine instead. ',
+                  DeprecationWarning, stacklevel=2)
+    return self.AddLine(points, color=color, width=style.width,
+                        pattern=(style.on, style.off), markers=markers,
+                        label=label)
+
+
+class Sparkline(LineChart):
+  """Represents a sparkline: a minimal line chart.
+
+  Sparklines behave like LineCharts, mostly, but come without axes.
+  """
diff --git a/google/appengine/_internal/graphy/pie_chart.py b/google/appengine/_internal/graphy/pie_chart.py
new file mode 100644
index 0000000..fc6f86d
--- /dev/null
+++ b/google/appengine/_internal/graphy/pie_chart.py
@@ -0,0 +1,178 @@
+#!/usr/bin/python2.4
+#
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Code for pie charts."""
+
+import warnings
+
+from google.appengine._internal.graphy import common
+from google.appengine._internal.graphy import util
+
+
+class Segment(common.DataSeries):
+  """A single segment of the pie chart.
+
+  Object attributes:
+    size: relative size of the segment (non-negative)
+    label: label of the segment (if any)
+    color: color of the segment (if any), as a hex string like 'ff0000'
+  """
+  def __init__(self, size, label=None, color=None):
+    # Detect the deprecated argument order (color used to come before
+    # label): a label that parses as a hex triplet is probably a color.
+    if label is not None and util._IsColor(label):
+      warnings.warn('Your code may be broken! '
+                    'Label looks like a hex triplet; it might be a color. '
+                    'The old argument order (color before label) is '
+                    'deprecated.',
+                    DeprecationWarning, stacklevel=2)
+    style = common._BasicStyle(color)
+    # A Segment is a DataSeries holding exactly one data point: its size.
+    super(Segment, self).__init__([size], label=label, style=style)
+    assert size >= 0
+
+  def _GetSize(self):
+    return self.data[0]
+
+  def _SetSize(self, value):
+    # Negative sizes are meaningless for a pie segment.
+    assert value >= 0
+    self.data[0] = value
+
+  size = property(_GetSize, _SetSize,
+                  doc = """The relative size of this pie segment.""")
+
+  # Since Segments are so simple, provide color for convenience.
+  def _GetColor(self):
+    return self.style.color
+
+  def _SetColor(self, color):
+    self.style.color = color
+
+  color = property(_GetColor, _SetColor,
+                   doc = """The color of this pie segment.""")
+
+
+class PieChart(common.BaseChart):
+  """Represents a pie chart.
+
+  The pie chart consists of a single "pie" by default, but additional pies
+  may be added using the AddPie method. The Google Chart API will display
+  the pies as concentric circles, with pie #0 on the inside; other backends
+  may display the pies differently.
+  """
+
+  def __init__(self, points=None, labels=None, colors=None):
+    """Constructor for PieChart objects.
+
+    Creates a pie chart with a single pie.
+
+    Args:
+      points: A list of data points for the pie chart;
+        i.e., relative sizes of the pie segments
+      labels: A list of labels for the pie segments.
+        TODO: Allow the user to pass in None as one of
+        the labels in order to skip that label.
+      colors: A list of colors for the pie segments, as hex strings
+        (f.ex. '0000ff' for blue). If there are less colors than pie
+        segments, the Google Chart API will attempt to produce a smooth
+        color transition between segments by spreading the colors across
+        them.
+    """
+    super(PieChart, self).__init__()
+    self.formatters = []
+    # Chart-wide color override set via SetColors; None means use the
+    # individual segment colors.
+    self._colors = None
+    if points:
+      self.AddPie(points, labels, colors)
+
+  def AddPie(self, points, labels=None, colors=None):
+    """Add a whole pie to the chart.
+
+    Args:
+      points: A list of pie segment sizes
+      labels: A list of labels for the pie segments
+      colors: A list of colors for the segments. Missing colors will be chosen
+        automatically.
+
+    Returns:
+      The index of the newly added pie.
+    """
+    num_colors = len(colors or [])
+    num_labels = len(labels or [])
+    pie_index = len(self.data)
+    self.data.append([])
+    for i, pt in enumerate(points):
+      # labels/colors may be shorter than points; missing entries are None.
+      label = None
+      if i < num_labels:
+        label = labels[i]
+      color = None
+      if i < num_colors:
+        color = colors[i]
+      self.AddSegment(pt, label=label, color=color, pie_index=pie_index)
+    return pie_index
+
+  def AddSegments(self, points, labels, colors):
+    """DEPRECATED. Use AddPie instead.
+
+    Note: unlike AddPie, labels must contain an entry for every point.
+    """
+    warnings.warn('PieChart.AddSegments is deprecated. Call AddPie instead. ',
+                  DeprecationWarning, stacklevel=2)
+    num_colors = len(colors or [])
+    for i, pt in enumerate(points):
+      assert pt >= 0
+      label = labels[i]
+      color = None
+      if i < num_colors:
+        color = colors[i]
+      self.AddSegment(pt, label=label, color=color)
+
+  def AddSegment(self, size, label=None, color=None, pie_index=0):
+    """Add a pie segment to this chart, and return the segment.
+
+    Args:
+      size: The size of the segment.
+      label: The label for the segment.
+      color: The color of the segment, or None to automatically choose the
+        color.
+      pie_index: The index of the pie that will receive the new segment.
+        By default, the chart has one pie (pie #0); use the AddPie method to
+        add more pies.
+    """
+    # Accept a pre-built Segment in place of size for backward compatibility.
+    if isinstance(size, Segment):
+      warnings.warn("AddSegment(segment) is deprecated. Use AddSegment(size, "
+                    "label, color) instead", DeprecationWarning, stacklevel=2)
+      segment = size
+    else:
+      segment = Segment(size, label=label, color=color)
+    assert segment.size >= 0
+    if pie_index == 0 and not self.data:
+      # Create the default pie
+      self.data.append([])
+    assert (pie_index >= 0 and pie_index < len(self.data))
+    self.data[pie_index].append(segment)
+    return segment
+
+  def AddSeries(self, points, color=None, style=None, markers=None, label=None):
+    """DEPRECATED
+
+    Add a new segment to the chart and return it.
+
+    The segment must contain exactly one data point; all parameters
+    other than color and label are ignored.
+    """
+    warnings.warn('PieChart.AddSeries is deprecated. Call AddSegment or '
+                  'AddSegments instead.', DeprecationWarning)
+    return self.AddSegment(Segment(points[0], color=color, label=label))
+
+  def SetColors(self, *colors):
+    """Change the colors of this chart to the specified list of colors.
+
+    Note that this will completely override the individual colors specified
+    in the pie segments. Missing colors will be interpolated, so that the
+    list of colors covers all segments in all the pies.
+    """
+    # NOTE(review): presumably the rendering backend reads _colors when
+    # encoding the chart -- not visible from this file; verify there.
+    self._colors = colors
diff --git a/google/appengine/_internal/graphy/util.py b/google/appengine/_internal/graphy/util.py
new file mode 100644
index 0000000..01c21de
--- /dev/null
+++ b/google/appengine/_internal/graphy/util.py
@@ -0,0 +1,13 @@
+def _IsColor(color):
+  """Try to determine if color is a hex color string.
+
+  Labels that look like hex colors will match too, unfortunately.
+
+  Args:
+    color: Candidate value, typically a label or a hex string like '0000ff'.
+
+  Returns:
+    True if color is a 3- or 6-digit hex string (surrounding '#'
+    characters are ignored), False otherwise.
+  """
+  if not isinstance(color, basestring):
+    return False
+  # NOTE: strip('#') removes '#' from both ends, not just a leading one;
+  # for well-formed inputs only the leading '#' matters.
+  color = color.strip('#')
+  if len(color) != 3 and len(color) != 6:
+    return False
+  hex_letters = '0123456789abcdefABCDEF'
+  for letter in color:
+    if letter not in hex_letters:
+      return False
+  return True
diff --git a/google/appengine/api/api_base_pb.py b/google/appengine/api/api_base_pb.py
index 23af6f9..0d4cb50 100644
--- a/google/appengine/api/api_base_pb.py
+++ b/google/appengine/api/api_base_pb.py
@@ -123,6 +123,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.base.StringProto'
class Integer32Proto(ProtocolBuffer.ProtocolMessage):
has_value_ = 0
value_ = 0
@@ -222,6 +223,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.base.Integer32Proto'
class Integer64Proto(ProtocolBuffer.ProtocolMessage):
has_value_ = 0
value_ = 0
@@ -321,6 +323,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.base.Integer64Proto'
class BoolProto(ProtocolBuffer.ProtocolMessage):
has_value_ = 0
value_ = 0
@@ -418,6 +421,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.base.BoolProto'
class DoubleProto(ProtocolBuffer.ProtocolMessage):
has_value_ = 0
value_ = 0.0
@@ -515,6 +519,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.base.DoubleProto'
class BytesProto(ProtocolBuffer.ProtocolMessage):
has_value_ = 0
value_ = ""
@@ -614,6 +619,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.base.BytesProto'
class VoidProto(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -678,5 +684,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.base.VoidProto'
__all__ = ['StringProto','Integer32Proto','Integer64Proto','BoolProto','DoubleProto','BytesProto','VoidProto']
diff --git a/google/appengine/api/app_identity/app_identity_service_pb.py b/google/appengine/api/app_identity/app_identity_service_pb.py
index 5d620d1..0ce6c54 100644
--- a/google/appengine/api/app_identity/app_identity_service_pb.py
+++ b/google/appengine/api/app_identity/app_identity_service_pb.py
@@ -158,6 +158,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.AppIdentityServiceError'
_SERIALIZED_DESCRIPTOR = array.array('B')
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KImFwcGhvc3RpbmcuQXBwSWRlbnRpdHlTZXJ2aWNlRXJyb3JzeglFcnJvckNvZGWLAZIBB1NVQ0NFU1OYAQCMAYsBkgENVU5LTk9XTl9TQ09QRZgBCYwBiwGSAQ5CTE9CX1RPT19MQVJHRZgB6AeMAYsBkgERREVBRExJTkVfRVhDRUVERUSYAekHjAGLAZIBD05PVF9BX1ZBTElEX0FQUJgB6geMAYsBkgENVU5LTk9XTl9FUlJPUpgB6weMAYsBkgEZR0FJQU1JTlRfTk9UX0lOSVRJQUlMSVpFRJgB7AeMAXS6AaMKCjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8SCmFwcGhvc3RpbmcivgEKF0FwcElkZW50aXR5U2VydmljZUVycm9yIqIBCglFcnJvckNvZGUSCwoHU1VDQ0VTUxAAEhEKDVVOS05PV05fU0NPUEUQCRITCg5CTE9CX1RPT19MQVJHRRDoBxIWChFERUFETElORV9FWENFRURFRBDpBxIUCg9OT1RfQV9WQUxJRF9BUFAQ6gcSEgoNVU5LTk9XTl9FUlJPUhDrBxIeChlHQUlBTUlOVF9OT1RfSU5JVElBSUxJWkVEEOwHIioKEVNpZ25Gb3JBcHBSZXF1ZXN0EhUKDWJ5dGVzX3RvX3NpZ24YASABKAwiPwoSU2lnbkZvckFwcFJlc3BvbnNlEhAKCGtleV9uYW1lGAEgASgJEhcKD3NpZ25hdHVyZV9ieXRlcxgCIAEoDCIjCiFHZXRQdWJsaWNDZXJ0aWZpY2F0ZUZvckFwcFJlcXVlc3QiQwoRUHVibGljQ2VydGlmaWNhdGUSEAoIa2V5X25hbWUYASABKAkSHAoUeDUwOV9jZXJ0aWZpY2F0ZV9wZW0YAiABKAkijQEKIkdldFB1YmxpY0NlcnRpZmljYXRlRm9yQXBwUmVzcG9uc2USPgoXcHVibGljX2NlcnRpZmljYXRlX2xpc3QYASADKAsyHS5hcHBob3N0aW5nLlB1YmxpY0NlcnRpZmljYXRlEicKH21heF9jbGllbnRfY2FjaGVfdGltZV9pbl9zZWNvbmQYAiABKAMiHgocR2V0U2VydmljZUFjY291bnROYW1lUmVxdWVzdCI9Ch1HZXRTZXJ2aWNlQWNjb3VudE5hbWVSZXNwb25zZRIcChRzZXJ2aWNlX2FjY291bnRfbmFtZRgBIAEoCSImChVHZXRBY2Nlc3NUb2tlblJlcXVlc3QSDQoFc2NvcGUYASADKAkiRwoWR2V0QWNjZXNzVG9rZW5SZXNwb25zZRIUCgxhY2Nlc3NfdG9rZW4YASABKAkSFwoPZXhwaXJhdGlvbl90aW1lGAIgASgDMqIDCg5TaWduaW5nU2VydmljZRJLCgpTaWduRm9yQXBwEh0uYXBwaG9zdGluZy5TaWduRm9yQXBwUmVxdWVzdBoeLmFwcGhvc3RpbmcuU2lnbkZvckFwcFJlc3BvbnNlEnwKG0dldFB1YmxpY0NlcnRpZmljYXRlc0ZvckFwcBItLmFwcGhvc3RpbmcuR2V0UHVibGljQ2VydGlmaWNhdGVGb3JBcHBSZXF1ZXN0Gi4uYXBwaG9zdGluZy5HZXRQdWJsaWNDZXJ0aWZpY2F0ZUZvckFwcFJlc3BvbnNlEmwKFUdldFNlcnZpY2VBY2NvdW50TmFtZRIoLmFwcGhvc3RpbmcuR2V0U2VydmljZUFjY291bnROYW1lUmVxdWVzdBopLmFwcGhvc3RpbmcuR2V0U2VydmljZUFjY291bnROYW1lUmVzcG9uc2USVwoOR2V0QWNjZ
XNzVG9rZW4SIS5hcHBob3N0aW5nLkdldEFjY2Vzc1Rva2VuUmVxdWVzdBoiLmFwcGhvc3RpbmcuR2V0QWNjZXNzVG9rZW5SZXNwb25zZUJACiRjb20uZ29vZ2xlLmFwcGVuZ2luZS5hcGkuYXBwaWRlbnRpdHkgASgCQhRBcHBJZGVudGl0eVNlcnZpY2VQYg=="))
if _net_proto___parse__python is not None:
@@ -285,6 +286,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.SignForAppRequest'
_SERIALIZED_DESCRIPTOR = array.array('B')
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KHGFwcGhvc3RpbmcuU2lnbkZvckFwcFJlcXVlc3QTGg1ieXRlc190b19zaWduIAEoAjAJOAEUwgEiYXBwaG9zdGluZy5BcHBJZGVudGl0eVNlcnZpY2VFcnJvcg=="))
if _net_proto___parse__python is not None:
@@ -446,6 +448,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.SignForAppResponse'
_SERIALIZED_DESCRIPTOR = array.array('B')
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KHWFwcGhvc3RpbmcuU2lnbkZvckFwcFJlc3BvbnNlExoIa2V5X25hbWUgASgCMAk4ARQTGg9zaWduYXR1cmVfYnl0ZXMgAigCMAk4ARTCASJhcHBob3N0aW5nLkFwcElkZW50aXR5U2VydmljZUVycm9y"))
if _net_proto___parse__python is not None:
@@ -543,6 +546,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.GetPublicCertificateForAppRequest'
_SERIALIZED_DESCRIPTOR = array.array('B')
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KLGFwcGhvc3RpbmcuR2V0UHVibGljQ2VydGlmaWNhdGVGb3JBcHBSZXF1ZXN0wgEiYXBwaG9zdGluZy5BcHBJZGVudGl0eVNlcnZpY2VFcnJvcg=="))
if _net_proto___parse__python is not None:
@@ -704,6 +708,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.PublicCertificate'
_SERIALIZED_DESCRIPTOR = array.array('B')
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KHGFwcGhvc3RpbmcuUHVibGljQ2VydGlmaWNhdGUTGghrZXlfbmFtZSABKAIwCTgBFBMaFHg1MDlfY2VydGlmaWNhdGVfcGVtIAIoAjAJOAEUwgEiYXBwaG9zdGluZy5BcHBJZGVudGl0eVNlcnZpY2VFcnJvcg=="))
if _net_proto___parse__python is not None:
@@ -884,6 +889,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.GetPublicCertificateForAppResponse'
_SERIALIZED_DESCRIPTOR = array.array('B')
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KLWFwcGhvc3RpbmcuR2V0UHVibGljQ2VydGlmaWNhdGVGb3JBcHBSZXNwb25zZRMaF3B1YmxpY19jZXJ0aWZpY2F0ZV9saXN0IAEoAjALOANKHGFwcGhvc3RpbmcuUHVibGljQ2VydGlmaWNhdGWjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExofbWF4X2NsaWVudF9jYWNoZV90aW1lX2luX3NlY29uZCACKAAwAzgBFMIBImFwcGhvc3RpbmcuQXBwSWRlbnRpdHlTZXJ2aWNlRXJyb3I="))
if _net_proto___parse__python is not None:
@@ -981,6 +987,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.GetServiceAccountNameRequest'
_SERIALIZED_DESCRIPTOR = array.array('B')
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KJ2FwcGhvc3RpbmcuR2V0U2VydmljZUFjY291bnROYW1lUmVxdWVzdMIBImFwcGhvc3RpbmcuQXBwSWRlbnRpdHlTZXJ2aWNlRXJyb3I="))
if _net_proto___parse__python is not None:
@@ -1108,6 +1115,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.GetServiceAccountNameResponse'
_SERIALIZED_DESCRIPTOR = array.array('B')
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KKGFwcGhvc3RpbmcuR2V0U2VydmljZUFjY291bnROYW1lUmVzcG9uc2UTGhRzZXJ2aWNlX2FjY291bnRfbmFtZSABKAIwCTgBFMIBImFwcGhvc3RpbmcuQXBwSWRlbnRpdHlTZXJ2aWNlRXJyb3I="))
if _net_proto___parse__python is not None:
@@ -1244,6 +1252,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.GetAccessTokenRequest'
_SERIALIZED_DESCRIPTOR = array.array('B')
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KIGFwcGhvc3RpbmcuR2V0QWNjZXNzVG9rZW5SZXF1ZXN0ExoFc2NvcGUgASgCMAk4AxTCASJhcHBob3N0aW5nLkFwcElkZW50aXR5U2VydmljZUVycm9y"))
if _net_proto___parse__python is not None:
@@ -1405,6 +1414,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.GetAccessTokenResponse'
_SERIALIZED_DESCRIPTOR = array.array('B')
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WjZhcHBob3N0aW5nL2FwaS9hcHBfaWRlbnRpdHkvYXBwX2lkZW50aXR5X3NlcnZpY2UucHJvdG8KIWFwcGhvc3RpbmcuR2V0QWNjZXNzVG9rZW5SZXNwb25zZRMaDGFjY2Vzc190b2tlbiABKAIwCTgBFBMaD2V4cGlyYXRpb25fdGltZSACKAAwAzgBFMIBImFwcGhvc3RpbmcuQXBwSWRlbnRpdHlTZXJ2aWNlRXJyb3I="))
if _net_proto___parse__python is not None:
diff --git a/google/appengine/api/app_logging.py b/google/appengine/api/app_logging.py
index 632c133..c2e4b71 100755
--- a/google/appengine/api/app_logging.py
+++ b/google/appengine/api/app_logging.py
@@ -34,8 +34,6 @@
import logging
-import sys
-import types
from google.appengine.api import logservice
@@ -80,7 +78,6 @@
"""Closes the stream.
This implementation based on the implementation of FileHandler.close()."""
- self.flush()
self.stream.close()
logging.StreamHandler.close(self)
@@ -93,11 +90,11 @@
message = self._AppLogsMessage(record)
if isinstance(message, unicode):
message = message.encode("UTF-8")
- self.stream.write(message)
- self.flush()
- logservice.auto_flush(lines_emitted=1)
+
+
+ logservice.write(message)
except (KeyboardInterrupt, SystemExit):
raise
except:
diff --git a/google/appengine/api/appinfo.py b/google/appengine/api/appinfo.py
index c320e03..bd57159 100755
--- a/google/appengine/api/appinfo.py
+++ b/google/appengine/api/appinfo.py
@@ -198,13 +198,16 @@
SUPPORTED_LIBRARIES = {
'django': ['1.2'],
+ 'jinja2': ['2.6'],
'lxml': ['2.3'],
+ 'markupsafe': ['0.15'],
'numpy': ['1.5.1'],
'PIL': ['1.1.7'],
'pycrypto': ['2.3'],
- 'yaml': ['3.05'],
- 'webapp2': ['2.0.2'],
+ 'setuptools': ['0.6c11'],
+ 'webapp2': ['2.0.2', '2.2.3', '2.3'],
'webob': ['1.0.8'],
+ 'yaml': ['3.05'],
}
diff --git a/google/appengine/api/appinfo_includes.py b/google/appengine/api/appinfo_includes.py
index 3dd6114..2a4053c 100755
--- a/google/appengine/api/appinfo_includes.py
+++ b/google/appengine/api/appinfo_includes.py
@@ -104,13 +104,14 @@
appinfo.AppInclude(builtins=appyaml.builtins,
includes=appyaml.includes),
os.path.dirname(appinfo_path),
+ appyaml.runtime,
open_fn=open_fn))
return appinfo.AppInclude.MergeAppYamlAppInclude(appyaml,
aggregate_appinclude)
-def _ResolveIncludes(included_from, app_include, basepath, state=None,
+def _ResolveIncludes(included_from, app_include, basepath, runtime, state=None,
open_fn=open):
"""Recursively includes all encountered builtins/includes directives.
@@ -124,6 +125,7 @@
included_from: file that included file was included from.
app_include: the AppInclude object to resolve.
basepath: application basepath.
+ runtime: name of the runtime.
state: contains the list of included and excluded files as well as the
directives of all encountered AppInclude objects.
open_fn: file opening function used when reading yaml files.
@@ -157,7 +159,7 @@
includes_list = _ConvertBuiltinsToIncludes(included_from, app_include,
- state)
+ state, runtime)
includes_list.extend(app_include.includes or [])
@@ -177,7 +179,7 @@
yaml_file = open_fn(inc_path, 'r')
try:
inc_yaml = appinfo.LoadAppInclude(yaml_file)
- _ResolveIncludes(inc_path, inc_yaml, basepath, state=state,
+ _ResolveIncludes(inc_path, inc_yaml, basepath, runtime, state=state,
open_fn=open_fn)
except appinfo_errors.EmptyConfigurationFile:
@@ -188,7 +190,7 @@
return state.aggregate_appinclude
-def _ConvertBuiltinsToIncludes(included_from, app_include, state):
+def _ConvertBuiltinsToIncludes(included_from, app_include, state, runtime):
"""Converts builtins directives to includes directives.
Moves all builtins directives in app_include into the includes
@@ -201,6 +203,7 @@
app_include: the AppInclude object currently being processed.
state: contains the list of included and excluded files as well as the
directives of all encountered AppInclude objects.
+ runtime: name of the runtime.
Returns:
list of the absolute paths to the include files for builtins where
@@ -216,7 +219,7 @@
continue
- yaml_path = builtins.get_yaml_path(builtin_name)
+ yaml_path = builtins.get_yaml_path(builtin_name, runtime)
if on_or_off == 'on':
includes_list.append(yaml_path)
diff --git a/google/appengine/api/blobstore/blobstore.py b/google/appengine/api/blobstore/blobstore.py
index 919be68..046b013 100755
--- a/google/appengine/api/blobstore/blobstore.py
+++ b/google/appengine/api/blobstore/blobstore.py
@@ -189,19 +189,62 @@
def create_upload_url(success_path,
- _make_sync_call=apiproxy_stub_map.MakeSyncCall):
+ _make_sync_call=None,
+ **kwargs):
"""Create upload URL for POST form.
Args:
success_path: Path within application to call when POST is successful
and upload is complete.
_make_sync_call: Used for dependency injection in tests.
+ max_bytes_per_blob: The maximum size in bytes that any one blob in the
+ upload can be or None for no maximum size.
+ max_bytes_total: The maximum size in bytes that the aggregate sizes of all
+ of the blobs in the upload can be or None for no maximum size.
+
+ Raises:
+ TypeError: If max_bytes_per_blob or max_bytes_total are not integral types.
+ ValueError: If max_bytes_per_blob or max_bytes_total are not
+ positive values.
"""
request = blobstore_service_pb.CreateUploadURLRequest()
response = blobstore_service_pb.CreateUploadURLResponse()
request.set_success_path(success_path)
+
+ make_sync_call = kwargs.pop('_make_sync_call',
+ apiproxy_stub_map.MakeSyncCall)
+
+ if _make_sync_call:
+ if callable(_make_sync_call):
+ make_sync_call = _make_sync_call
+ else:
+ raise TypeError('_make_sync_call must be callable')
+
+ if 'max_bytes_per_blob' in kwargs:
+ max_bytes_per_blob = kwargs.pop('max_bytes_per_blob')
+ if not isinstance(max_bytes_per_blob, (int, long)):
+ raise TypeError('max_bytes_per_blob must be integer.')
+ if max_bytes_per_blob < 1:
+ raise ValueError('max_bytes_per_blob must be positive.')
+ request.set_max_upload_size_per_blob_bytes(max_bytes_per_blob)
+
+ if 'max_bytes_total' in kwargs:
+ max_bytes_total = kwargs.pop('max_bytes_total')
+ if not isinstance(max_bytes_total, (int, long)):
+ raise TypeError('max_bytes_total must be integer.')
+ if max_bytes_total < 1:
+ raise ValueError('max_bytes_total must be positive.')
+ request.set_max_upload_size_bytes(max_bytes_total)
+
+ if (request.has_max_upload_size_bytes() and
+ request.has_max_upload_size_per_blob_bytes()):
+ if (request.max_upload_size_bytes() <
+ request.max_upload_size_per_blob_bytes()):
+ raise ValueError('max_bytes_total can not be less'
+ ' than max_upload_size_per_blob_bytes')
+
try:
- _make_sync_call('blobstore', 'CreateUploadURL', request, response)
+ make_sync_call('blobstore', 'CreateUploadURL', request, response)
except apiproxy_errors.ApplicationError, e:
raise _ToBlobstoreError(e)
diff --git a/google/appengine/api/blobstore/blobstore_service_pb.py b/google/appengine/api/blobstore/blobstore_service_pb.py
index 6148be2..4a41fd1 100644
--- a/google/appengine/api/blobstore/blobstore_service_pb.py
+++ b/google/appengine/api/blobstore/blobstore_service_pb.py
@@ -36,6 +36,7 @@
BLOB_NOT_FOUND = 4
DATA_INDEX_OUT_OF_RANGE = 5
BLOB_FETCH_SIZE_TOO_LARGE = 6
+ ARGUMENT_OUT_OF_RANGE = 8
_ErrorCode_NAMES = {
0: "OK",
@@ -45,6 +46,7 @@
4: "BLOB_NOT_FOUND",
5: "DATA_INDEX_OUT_OF_RANGE",
6: "BLOB_FETCH_SIZE_TOO_LARGE",
+ 8: "ARGUMENT_OUT_OF_RANGE",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
@@ -113,9 +115,14 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.BlobstoreServiceError'
class CreateUploadURLRequest(ProtocolBuffer.ProtocolMessage):
has_success_path_ = 0
success_path_ = ""
+ has_max_upload_size_bytes_ = 0
+ max_upload_size_bytes_ = 0
+ has_max_upload_size_per_blob_bytes_ = 0
+ max_upload_size_per_blob_bytes_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
@@ -133,15 +140,47 @@
def has_success_path(self): return self.has_success_path_
+ def max_upload_size_bytes(self): return self.max_upload_size_bytes_
+
+ def set_max_upload_size_bytes(self, x):
+ self.has_max_upload_size_bytes_ = 1
+ self.max_upload_size_bytes_ = x
+
+ def clear_max_upload_size_bytes(self):
+ if self.has_max_upload_size_bytes_:
+ self.has_max_upload_size_bytes_ = 0
+ self.max_upload_size_bytes_ = 0
+
+ def has_max_upload_size_bytes(self): return self.has_max_upload_size_bytes_
+
+ def max_upload_size_per_blob_bytes(self): return self.max_upload_size_per_blob_bytes_
+
+ def set_max_upload_size_per_blob_bytes(self, x):
+ self.has_max_upload_size_per_blob_bytes_ = 1
+ self.max_upload_size_per_blob_bytes_ = x
+
+ def clear_max_upload_size_per_blob_bytes(self):
+ if self.has_max_upload_size_per_blob_bytes_:
+ self.has_max_upload_size_per_blob_bytes_ = 0
+ self.max_upload_size_per_blob_bytes_ = 0
+
+ def has_max_upload_size_per_blob_bytes(self): return self.has_max_upload_size_per_blob_bytes_
+
def MergeFrom(self, x):
assert x is not self
if (x.has_success_path()): self.set_success_path(x.success_path())
+ if (x.has_max_upload_size_bytes()): self.set_max_upload_size_bytes(x.max_upload_size_bytes())
+ if (x.has_max_upload_size_per_blob_bytes()): self.set_max_upload_size_per_blob_bytes(x.max_upload_size_per_blob_bytes())
def Equals(self, x):
if x is self: return 1
if self.has_success_path_ != x.has_success_path_: return 0
if self.has_success_path_ and self.success_path_ != x.success_path_: return 0
+ if self.has_max_upload_size_bytes_ != x.has_max_upload_size_bytes_: return 0
+ if self.has_max_upload_size_bytes_ and self.max_upload_size_bytes_ != x.max_upload_size_bytes_: return 0
+ if self.has_max_upload_size_per_blob_bytes_ != x.has_max_upload_size_per_blob_bytes_: return 0
+ if self.has_max_upload_size_per_blob_bytes_ and self.max_upload_size_per_blob_bytes_ != x.max_upload_size_per_blob_bytes_: return 0
return 1
def IsInitialized(self, debug_strs=None):
@@ -155,6 +194,8 @@
def ByteSize(self):
n = 0
n += self.lengthString(len(self.success_path_))
+ if (self.has_max_upload_size_bytes_): n += 1 + self.lengthVarInt64(self.max_upload_size_bytes_)
+ if (self.has_max_upload_size_per_blob_bytes_): n += 1 + self.lengthVarInt64(self.max_upload_size_per_blob_bytes_)
return n + 1
def ByteSizePartial(self):
@@ -162,19 +203,35 @@
if (self.has_success_path_):
n += 1
n += self.lengthString(len(self.success_path_))
+ if (self.has_max_upload_size_bytes_): n += 1 + self.lengthVarInt64(self.max_upload_size_bytes_)
+ if (self.has_max_upload_size_per_blob_bytes_): n += 1 + self.lengthVarInt64(self.max_upload_size_per_blob_bytes_)
return n
def Clear(self):
self.clear_success_path()
+ self.clear_max_upload_size_bytes()
+ self.clear_max_upload_size_per_blob_bytes()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.success_path_)
+ if (self.has_max_upload_size_bytes_):
+ out.putVarInt32(16)
+ out.putVarInt64(self.max_upload_size_bytes_)
+ if (self.has_max_upload_size_per_blob_bytes_):
+ out.putVarInt32(24)
+ out.putVarInt64(self.max_upload_size_per_blob_bytes_)
def OutputPartial(self, out):
if (self.has_success_path_):
out.putVarInt32(10)
out.putPrefixedString(self.success_path_)
+ if (self.has_max_upload_size_bytes_):
+ out.putVarInt32(16)
+ out.putVarInt64(self.max_upload_size_bytes_)
+ if (self.has_max_upload_size_per_blob_bytes_):
+ out.putVarInt32(24)
+ out.putVarInt64(self.max_upload_size_per_blob_bytes_)
def TryMerge(self, d):
while d.avail() > 0:
@@ -182,6 +239,12 @@
if tt == 10:
self.set_success_path(d.getPrefixedString())
continue
+ if tt == 16:
+ self.set_max_upload_size_bytes(d.getVarInt64())
+ continue
+ if tt == 24:
+ self.set_max_upload_size_per_blob_bytes(d.getVarInt64())
+ continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -191,6 +254,8 @@
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_success_path_: res+=prefix+("success_path: %s\n" % self.DebugFormatString(self.success_path_))
+ if self.has_max_upload_size_bytes_: res+=prefix+("max_upload_size_bytes: %s\n" % self.DebugFormatInt64(self.max_upload_size_bytes_))
+ if self.has_max_upload_size_per_blob_bytes_: res+=prefix+("max_upload_size_per_blob_bytes: %s\n" % self.DebugFormatInt64(self.max_upload_size_per_blob_bytes_))
return res
@@ -198,20 +263,27 @@
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksuccess_path = 1
+ kmax_upload_size_bytes = 2
+ kmax_upload_size_per_blob_bytes = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "success_path",
- }, 1)
+ 2: "max_upload_size_bytes",
+ 3: "max_upload_size_per_blob_bytes",
+ }, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
- }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CreateUploadURLRequest'
class CreateUploadURLResponse(ProtocolBuffer.ProtocolMessage):
has_url_ = 0
url_ = ""
@@ -311,6 +383,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CreateUploadURLResponse'
class DeleteBlobRequest(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -414,6 +487,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.DeleteBlobRequest'
class FetchDataRequest(ProtocolBuffer.ProtocolMessage):
has_blob_key_ = 0
blob_key_ = ""
@@ -591,6 +665,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.FetchDataRequest'
class FetchDataResponse(ProtocolBuffer.ProtocolMessage):
has_data_ = 0
data_ = ""
@@ -690,6 +765,285 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.FetchDataResponse'
+class CloneBlobRequest(ProtocolBuffer.ProtocolMessage):
+ has_blob_key_ = 0
+ blob_key_ = ""
+ has_mime_type_ = 0
+ mime_type_ = ""
+ has_target_app_id_ = 0
+ target_app_id_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def blob_key(self): return self.blob_key_
+
+ def set_blob_key(self, x):
+ self.has_blob_key_ = 1
+ self.blob_key_ = x
+
+ def clear_blob_key(self):
+ if self.has_blob_key_:
+ self.has_blob_key_ = 0
+ self.blob_key_ = ""
+
+ def has_blob_key(self): return self.has_blob_key_
+
+ def mime_type(self): return self.mime_type_
+
+ def set_mime_type(self, x):
+ self.has_mime_type_ = 1
+ self.mime_type_ = x
+
+ def clear_mime_type(self):
+ if self.has_mime_type_:
+ self.has_mime_type_ = 0
+ self.mime_type_ = ""
+
+ def has_mime_type(self): return self.has_mime_type_
+
+ def target_app_id(self): return self.target_app_id_
+
+ def set_target_app_id(self, x):
+ self.has_target_app_id_ = 1
+ self.target_app_id_ = x
+
+ def clear_target_app_id(self):
+ if self.has_target_app_id_:
+ self.has_target_app_id_ = 0
+ self.target_app_id_ = ""
+
+ def has_target_app_id(self): return self.has_target_app_id_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_blob_key()): self.set_blob_key(x.blob_key())
+ if (x.has_mime_type()): self.set_mime_type(x.mime_type())
+ if (x.has_target_app_id()): self.set_target_app_id(x.target_app_id())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_blob_key_ != x.has_blob_key_: return 0
+ if self.has_blob_key_ and self.blob_key_ != x.blob_key_: return 0
+ if self.has_mime_type_ != x.has_mime_type_: return 0
+ if self.has_mime_type_ and self.mime_type_ != x.mime_type_: return 0
+ if self.has_target_app_id_ != x.has_target_app_id_: return 0
+ if self.has_target_app_id_ and self.target_app_id_ != x.target_app_id_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_blob_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: blob_key not set.')
+ if (not self.has_mime_type_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: mime_type not set.')
+ if (not self.has_target_app_id_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: target_app_id not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.blob_key_))
+ n += self.lengthString(len(self.mime_type_))
+ n += self.lengthString(len(self.target_app_id_))
+ return n + 3
+
+ def ByteSizePartial(self):
+ n = 0
+ if (self.has_blob_key_):
+ n += 1
+ n += self.lengthString(len(self.blob_key_))
+ if (self.has_mime_type_):
+ n += 1
+ n += self.lengthString(len(self.mime_type_))
+ if (self.has_target_app_id_):
+ n += 1
+ n += self.lengthString(len(self.target_app_id_))
+ return n
+
+ def Clear(self):
+ self.clear_blob_key()
+ self.clear_mime_type()
+ self.clear_target_app_id()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.blob_key_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.mime_type_)
+ out.putVarInt32(26)
+ out.putPrefixedString(self.target_app_id_)
+
+ def OutputPartial(self, out):
+ if (self.has_blob_key_):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.blob_key_)
+ if (self.has_mime_type_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.mime_type_)
+ if (self.has_target_app_id_):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.target_app_id_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_blob_key(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_mime_type(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_target_app_id(d.getPrefixedString())
+ continue
+
+
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_blob_key_: res+=prefix+("blob_key: %s\n" % self.DebugFormatString(self.blob_key_))
+ if self.has_mime_type_: res+=prefix+("mime_type: %s\n" % self.DebugFormatString(self.mime_type_))
+ if self.has_target_app_id_: res+=prefix+("target_app_id: %s\n" % self.DebugFormatString(self.target_app_id_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kblob_key = 1
+ kmime_type = 2
+ ktarget_app_id = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "blob_key",
+ 2: "mime_type",
+ 3: "target_app_id",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CloneBlobRequest'
+class CloneBlobResponse(ProtocolBuffer.ProtocolMessage):
+ has_blob_key_ = 0
+ blob_key_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def blob_key(self): return self.blob_key_
+
+ def set_blob_key(self, x):
+ self.has_blob_key_ = 1
+ self.blob_key_ = x
+
+ def clear_blob_key(self):
+ if self.has_blob_key_:
+ self.has_blob_key_ = 0
+ self.blob_key_ = ""
+
+ def has_blob_key(self): return self.has_blob_key_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_blob_key()): self.set_blob_key(x.blob_key())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_blob_key_ != x.has_blob_key_: return 0
+ if self.has_blob_key_ and self.blob_key_ != x.blob_key_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_blob_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: blob_key not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.blob_key_))
+ return n + 1
+
+ def ByteSizePartial(self):
+ n = 0
+ if (self.has_blob_key_):
+ n += 1
+ n += self.lengthString(len(self.blob_key_))
+ return n
+
+ def Clear(self):
+ self.clear_blob_key()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.blob_key_)
+
+ def OutputPartial(self, out):
+ if (self.has_blob_key_):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.blob_key_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_blob_key(d.getPrefixedString())
+ continue
+
+
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_blob_key_: res+=prefix+("blob_key: %s\n" % self.DebugFormatString(self.blob_key_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kblob_key = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "blob_key",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CloneBlobResponse'
class DecodeBlobKeyRequest(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -793,6 +1147,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.DecodeBlobKeyRequest'
class DecodeBlobKeyResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -896,5 +1251,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.DecodeBlobKeyResponse'
-__all__ = ['BlobstoreServiceError','CreateUploadURLRequest','CreateUploadURLResponse','DeleteBlobRequest','FetchDataRequest','FetchDataResponse','DecodeBlobKeyRequest','DecodeBlobKeyResponse']
+__all__ = ['BlobstoreServiceError','CreateUploadURLRequest','CreateUploadURLResponse','DeleteBlobRequest','FetchDataRequest','FetchDataResponse','CloneBlobRequest','CloneBlobResponse','DecodeBlobKeyRequest','DecodeBlobKeyResponse']
diff --git a/google/appengine/api/blobstore/blobstore_stub.py b/google/appengine/api/blobstore/blobstore_stub.py
index 15d5193..2b424f3 100755
--- a/google/appengine/api/blobstore/blobstore_stub.py
+++ b/google/appengine/api/blobstore/blobstore_stub.py
@@ -65,7 +65,11 @@
_UPLOAD_SESSION_KIND = '__BlobUploadSession__'
-def CreateUploadSession(creation, success_path, user):
+def CreateUploadSession(creation,
+ success_path,
+ user,
+ max_bytes_per_blob,
+ max_bytes_total):
"""Create upload session in datastore.
Creates an upload session and puts it in Datastore to be referenced by
@@ -75,6 +79,8 @@
creation: Creation timestamp.
success_path: Path in users application to call upon success.
user: User that initiated this upload, if any.
+ max_bytes_per_blob: Maximum number of bytes for any blob in the upload.
+ max_bytes_total: Maximum aggregate bytes for all blobs in the upload.
Returns:
String encoded key of new Datastore entity.
@@ -83,7 +89,9 @@
entity.update({'creation': creation,
'success_path': success_path,
'user': user,
- 'state': 'init'})
+ 'state': 'init',
+ 'max_bytes_per_blob': max_bytes_per_blob,
+ 'max_bytes_total': max_bytes_total})
datastore.Put(entity)
return str(entity.key())
@@ -196,19 +204,27 @@
except KeyError:
raise ConfigurationError('%s is not set in environment.' % name)
- def _CreateSession(self, success_path, user):
+ def _CreateSession(self,
+ success_path,
+ user,
+ max_bytes_per_blob=None,
+ max_bytes_total=None):
"""Create new upload session.
Args:
success_path: Application path to call upon successful POST.
user: User that initiated the upload session.
+ max_bytes_per_blob: Maximum number of bytes for any blob in the upload.
+ max_bytes_total: Maximum aggregate bytes for all blobs in the upload.
Returns:
String encoded key of a new upload session created in the datastore.
"""
return CreateUploadSession(self.__time_function(),
success_path,
- user)
+ user,
+ max_bytes_per_blob,
+ max_bytes_total)
def _Dynamic_CreateUploadURL(self, request, response):
"""Create upload URL implementation.
@@ -221,8 +237,19 @@
request: A fully initialized CreateUploadURLRequest instance.
response: A CreateUploadURLResponse instance.
"""
+ max_bytes_per_blob = None
+ max_bytes_total = None
+
+ if request.has_max_upload_size_per_blob_bytes():
+ max_bytes_per_blob = request.max_upload_size_per_blob_bytes()
+
+ if request.has_max_upload_size_bytes():
+ max_bytes_total = request.max_upload_size_bytes()
+
session = self._CreateSession(request.success_path(),
- users.get_current_user())
+ users.get_current_user(),
+ max_bytes_per_blob,
+ max_bytes_total)
response.set_url('http://%s:%s/%s%s' % (self._GetEnviron('SERVER_NAME'),
self._GetEnviron('SERVER_PORT'),
diff --git a/google/appengine/api/capabilities/capability_service_pb.py b/google/appengine/api/capabilities/capability_service_pb.py
index d9394ce..478450d 100644
--- a/google/appengine/api/capabilities/capability_service_pb.py
+++ b/google/appengine/api/capabilities/capability_service_pb.py
@@ -211,6 +211,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.IsEnabledRequest'
class IsEnabledResponse(ProtocolBuffer.ProtocolMessage):
@@ -416,5 +417,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.IsEnabledResponse'
__all__ = ['IsEnabledRequest','IsEnabledResponse']
diff --git a/google/appengine/api/channel/channel_service_pb.py b/google/appengine/api/channel/channel_service_pb.py
index cfd229e..948afe8 100755
--- a/google/appengine/api/channel/channel_service_pb.py
+++ b/google/appengine/api/channel/channel_service_pb.py
@@ -105,6 +105,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ChannelServiceError'
class CreateChannelRequest(ProtocolBuffer.ProtocolMessage):
has_application_key_ = 0
application_key_ = ""
@@ -204,6 +205,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CreateChannelRequest'
class CreateChannelResponse(ProtocolBuffer.ProtocolMessage):
has_client_id_ = 0
client_id_ = ""
@@ -298,6 +300,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CreateChannelResponse'
class SendMessageRequest(ProtocolBuffer.ProtocolMessage):
has_application_key_ = 0
application_key_ = ""
@@ -436,5 +439,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.SendMessageRequest'
__all__ = ['ChannelServiceError','CreateChannelRequest','CreateChannelResponse','SendMessageRequest']
diff --git a/google/appengine/api/conversion/conversion_service_pb.py b/google/appengine/api/conversion/conversion_service_pb.py
index eb60d2c..e4003d0 100644
--- a/google/appengine/api/conversion/conversion_service_pb.py
+++ b/google/appengine/api/conversion/conversion_service_pb.py
@@ -113,6 +113,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ConversionServiceError'
class AssetInfo(ProtocolBuffer.ProtocolMessage):
has_name_ = 0
name_ = ""
@@ -275,6 +276,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.AssetInfo'
class DocumentInfo(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -388,6 +390,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.DocumentInfo'
class ConversionInput_AuxData(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
@@ -521,6 +524,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ConversionInput_AuxData'
class ConversionInput(ProtocolBuffer.ProtocolMessage):
has_input_ = 0
has_output_mime_type_ = 0
@@ -716,6 +720,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ConversionInput'
class ConversionOutput(ProtocolBuffer.ProtocolMessage):
has_error_code_ = 0
error_code_ = 0
@@ -865,6 +870,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ConversionOutput'
class ConversionRequest(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -978,6 +984,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ConversionRequest'
class ConversionResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -1091,5 +1098,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ConversionResponse'
__all__ = ['ConversionServiceError','AssetInfo','DocumentInfo','ConversionInput_AuxData','ConversionInput','ConversionOutput','ConversionRequest','ConversionResponse']
diff --git a/google/appengine/api/conversion/conversion_stub.py b/google/appengine/api/conversion/conversion_stub.py
index deaf0eb..e264f43 100644
--- a/google/appengine/api/conversion/conversion_stub.py
+++ b/google/appengine/api/conversion/conversion_stub.py
@@ -35,19 +35,30 @@
from google.appengine.runtime import apiproxy_errors
-__all__ = ["ConversionServiceStub",
- "CONVERTED_FILES_STUB"]
+__all__ = ["ConversionServiceStub"]
-CONVERTED_FILES_STUB = {
+_CONVERTED_FILES_STUB = {
"text/html": "<b>Some data!</b>",
"text/plain": "Some data!",
}
+
+_SUPPORTED_PATHS = {
+ "application/pdf": ("image/png", "text/html", "text/plain"),
+ "image/bmp": ("application/pdf", "text/html", "text/plain"),
+ "image/gif": ("application/pdf", "text/html", "text/plain"),
+ "image/jpeg": ("application/pdf", "text/html", "text/plain"),
+ "image/png": ("application/pdf", "text/html", "text/plain"),
+ "text/html": ("application/pdf", "image/png", "text/plain"),
+ "text/plain": ("application/pdf", "image/png", "text/html"),
+ }
+
+
def _validate_conversion_request(request):
"""Validates ConversionRequest and throws ConversionServiceError if invalid.
@@ -97,11 +108,21 @@
+
+
+ input_mime_type = request.conversion(x).input().asset(0).mime_type()
output_mime_type = request.conversion(x).output_mime_type()
- if output_mime_type not in CONVERTED_FILES_STUB:
+ if not _is_supported(input_mime_type, output_mime_type):
raise apiproxy_errors.ApplicationError(
conversion_service_pb.ConversionServiceError.UNSUPPORTED_CONVERSION,
- "Output mime type %s is not supported" % output_mime_type)
+ "Conversion from %s to %s is not supported" %
+ (input_mime_type, output_mime_type))
+
+
+def _is_supported(input_mime_type, output_mime_type):
+ """Whether the conversion path is supported."""
+ return (input_mime_type in _SUPPORTED_PATHS and
+ output_mime_type in _SUPPORTED_PATHS[input_mime_type])
class ConversionServiceStub(apiproxy_stub.APIProxyStub):
@@ -129,7 +150,7 @@
output_mime_type = request.conversion(x).output_mime_type()
output_asset = result.mutable_output().add_asset()
output_asset.set_mime_type(output_mime_type)
- output_asset.set_data(CONVERTED_FILES_STUB[output_mime_type])
+ output_asset.set_data(_CONVERTED_FILES_STUB[output_mime_type])
first_input_asset = request.conversion(x).input().asset(0)
if first_input_asset.has_name():
output_asset.set_name(first_input_asset.name())
diff --git a/google/appengine/api/datastore.py b/google/appengine/api/datastore.py
index 8363ade..882448e 100755
--- a/google/appengine/api/datastore.py
+++ b/google/appengine/api/datastore.py
@@ -1016,7 +1016,8 @@
properties = [properties]
for prop in properties:
- if (prop.meaning() in datastore_types._RAW_PROPERTY_MEANINGS or
+ if ((prop.has_meaning() and
+ prop.meaning() in datastore_types._RAW_PROPERTY_MEANINGS) or
name in self.unindexed_properties()):
pb.raw_property_list().append(prop)
else:
diff --git a/google/appengine/api/datastore_file_stub.py b/google/appengine/api/datastore_file_stub.py
index a3c1902..aad6aca 100755
--- a/google/appengine/api/datastore_file_stub.py
+++ b/google/appengine/api/datastore_file_stub.py
@@ -669,7 +669,7 @@
finally:
self.__entities_lock.release()
- return datastore_stub_util._GetQueryCursor(results, query, filters, orders)
+ return datastore_stub_util._ExecuteQuery(results, query, filters, orders)
def _AllocateIds(self, reference, size=1, max_id=None):
datastore_stub_util.Check(not (size and max_id),
diff --git a/google/appengine/api/datastore_types.py b/google/appengine/api/datastore_types.py
index 57dff8c..60a0de2 100755
--- a/google/appengine/api/datastore_types.py
+++ b/google/appengine/api/datastore_types.py
@@ -112,6 +112,10 @@
_EMPTY_NAMESPACE_ID = 1
+_EPOCH = datetime.datetime.utcfromtimestamp(0)
+
+
+
class UtcTzinfo(datetime.tzinfo):
@@ -736,7 +740,7 @@
return cmp(len(self_args), len(other_args))
def __hash__(self):
- """Returns a 32-bit integer hash of this key.
+ """Returns an integer hash of this key.
Implements Python's hash protocol so that Keys may be used in sets and as
dictionary keys.
@@ -749,6 +753,24 @@
return hash(type(args)) ^ hash(tuple(args))
+class _OverflowDateTime(long):
+ """Container for GD_WHEN values that don't fit into a datetime.datetime.
+
+ This class only exists to safely round-trip GD_WHEN values that are too large
+  to fit in a datetime.datetime instance, e.g. values that were created by
+  Java applications. It should not be created directly.
+ """
+ pass
+
+
+def _When(val):
+  """Converts a GD_WHEN value to the appropriate type."""
+ try:
+ return _EPOCH + datetime.timedelta(microseconds=val)
+ except OverflowError:
+ return _OverflowDateTime(val)
+
+
@@ -884,7 +906,7 @@
return cmp(self.lon, other.lon)
def __hash__(self):
- """Returns a 32-bit integer hash of this point.
+ """Returns an integer hash of this point.
Implements Python's hash protocol so that GeoPts may be used in sets and
as dictionary keys.
@@ -1247,6 +1269,7 @@
ByteString: entity_pb.Property.BYTESTRING,
Text: entity_pb.Property.TEXT,
datetime.datetime: entity_pb.Property.GD_WHEN,
+ _OverflowDateTime: entity_pb.Property.GD_WHEN,
Category: entity_pb.Property.ATOM_CATEGORY,
Link: entity_pb.Property.ATOM_LINK,
Email: entity_pb.Property.GD_EMAIL,
@@ -1265,6 +1288,7 @@
bool,
Category,
datetime.datetime,
+ _OverflowDateTime,
Email,
float,
GeoPt,
@@ -1379,6 +1403,7 @@
bool: ValidatePropertyNothing,
Category: ValidatePropertyString,
datetime.datetime: ValidatePropertyNothing,
+ _OverflowDateTime: ValidatePropertyInteger,
Email: ValidatePropertyString,
float: ValidatePropertyNothing,
GeoPt: ValidatePropertyNothing,
@@ -1610,6 +1635,7 @@
bool: PackBool,
Category: PackString,
datetime.datetime: PackDatetime,
+ _OverflowDateTime: PackInteger,
Email: PackString,
float: PackFloat,
GeoPt: PackGeoPt,
@@ -1707,9 +1733,6 @@
-_EPOCH = datetime.datetime.utcfromtimestamp(0)
-
-
@@ -1717,10 +1740,7 @@
_PROPERTY_CONVERSIONS = {
- entity_pb.Property.GD_WHEN:
-
-
- lambda val: _EPOCH + datetime.timedelta(microseconds=val),
+ entity_pb.Property.GD_WHEN: _When,
entity_pb.Property.ATOM_CATEGORY: Category,
entity_pb.Property.ATOM_LINK: Link,
entity_pb.Property.GD_EMAIL: Email,
@@ -1750,7 +1770,8 @@
if pbval.has_stringvalue():
value = pbval.stringvalue()
- if meaning not in (entity_pb.Property.BLOB, entity_pb.Property.BYTESTRING):
+ if not pb.has_meaning() or meaning not in (entity_pb.Property.BLOB,
+ entity_pb.Property.BYTESTRING):
value = unicode(value.decode('utf-8'))
elif pbval.has_int64value():
@@ -1788,7 +1809,7 @@
value = None
try:
- if pb.has_meaning() and pb.meaning() in _PROPERTY_CONVERSIONS:
+ if pb.has_meaning() and meaning in _PROPERTY_CONVERSIONS:
conversion = _PROPERTY_CONVERSIONS[meaning]
value = conversion(value)
except (KeyError, ValueError, IndexError, TypeError, AttributeError), msg:
diff --git a/google/appengine/api/files/file.py b/google/appengine/api/files/file.py
index 9308560..8eb3bee 100755
--- a/google/appengine/api/files/file.py
+++ b/google/appengine/api/files/file.py
@@ -152,8 +152,12 @@
def _raise_app_error(e):
"""Convert RPC error into api-specific exception."""
- if (e.application_error ==
- file_service_pb.FileServiceErrors.EXISTENCE_ERROR):
+ if (e.application_error in
+ [file_service_pb.FileServiceErrors.EXISTENCE_ERROR,
+ file_service_pb.FileServiceErrors.EXISTENCE_ERROR_METADATA_NOT_FOUND,
+ file_service_pb.FileServiceErrors.EXISTENCE_ERROR_METADATA_FOUND,
+ file_service_pb.FileServiceErrors.EXISTENCE_ERROR_SHARDING_MISMATCH,
+ ]):
raise ExistenceError()
elif (e.application_error ==
file_service_pb.FileServiceErrors.API_TEMPORARILY_UNAVAILABLE):
@@ -505,6 +509,19 @@
raise InvalidFileNameError( 'Unsupported file name: %s' % filename)
+def _get_capabilities():
+ """Get files API capabilities.
+
+ Returns:
+ An instance of file_service_pb.GetCapabilitiesResponse.
+ """
+ request = file_service_pb.GetCapabilitiesRequest()
+ response = file_service_pb.GetCapabilitiesResponse()
+
+ _make_call('GetCapabilities', request, response)
+ return response
+
+
class BufferedFile(object):
"""BufferedFile is a file-like object reading underlying file in chunks."""
diff --git a/google/appengine/api/files/file_service_pb.py b/google/appengine/api/files/file_service_pb.py
index fa36f79..f506779 100755
--- a/google/appengine/api/files/file_service_pb.py
+++ b/google/appengine/api/files/file_service_pb.py
@@ -26,8 +26,6 @@
from google.appengine.api.api_base_pb import *
import google.appengine.api.api_base_pb
-from google.appengine.api.files.shuffle_types_pb import *
-import google.appengine.api.files.shuffle_types_pb
class FileServiceErrors(ProtocolBuffer.ProtocolMessage):
@@ -49,6 +47,9 @@
UNSUPPORTED_CONTENT_TYPE = 102
READ_ONLY = 103
EXCLUSIVE_LOCK_FAILED = 104
+ EXISTENCE_ERROR_METADATA_NOT_FOUND = 105
+ EXISTENCE_ERROR_METADATA_FOUND = 106
+ EXISTENCE_ERROR_SHARDING_MISMATCH = 107
SEQUENCE_KEY_OUT_OF_ORDER = 300
OUT_OF_BOUNDS = 500
GLOBS_NOT_SUPPORTED = 600
@@ -60,6 +61,7 @@
SHUFFLER_INTERNAL_ERROR = 800
SHUFFLE_REQUEST_TOO_LARGE = 801
DUPLICATE_SHUFFLE_NAME = 802
+ SHUFFLE_NOT_AVAILABLE = 803
SHUFFLER_TEMPORARILY_UNAVAILABLE = 900
MAX_ERROR_CODE = 9999
@@ -82,6 +84,9 @@
102: "UNSUPPORTED_CONTENT_TYPE",
103: "READ_ONLY",
104: "EXCLUSIVE_LOCK_FAILED",
+ 105: "EXISTENCE_ERROR_METADATA_NOT_FOUND",
+ 106: "EXISTENCE_ERROR_METADATA_FOUND",
+ 107: "EXISTENCE_ERROR_SHARDING_MISMATCH",
300: "SEQUENCE_KEY_OUT_OF_ORDER",
500: "OUT_OF_BOUNDS",
600: "GLOBS_NOT_SUPPORTED",
@@ -93,6 +98,7 @@
800: "SHUFFLER_INTERNAL_ERROR",
801: "SHUFFLE_REQUEST_TOO_LARGE",
802: "DUPLICATE_SHUFFLE_NAME",
+ 803: "SHUFFLE_NOT_AVAILABLE",
900: "SHUFFLER_TEMPORARILY_UNAVAILABLE",
9999: "MAX_ERROR_CODE",
}
@@ -163,6 +169,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.FileServiceErrors'
class KeyValue(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
@@ -301,6 +308,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.KeyValue'
class KeyValues(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
@@ -443,6 +451,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.KeyValues'
class FileContentType(ProtocolBuffer.ProtocolMessage):
@@ -522,6 +531,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.FileContentType'
class CreateRequest_Parameter(ProtocolBuffer.ProtocolMessage):
has_name_ = 0
name_ = ""
@@ -660,6 +670,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.CreateRequest_Parameter'
class CreateRequest(ProtocolBuffer.ProtocolMessage):
has_filesystem_ = 0
filesystem_ = ""
@@ -919,6 +930,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.CreateRequest'
class CreateResponse(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
@@ -1018,6 +1030,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.CreateResponse'
class OpenRequest(ProtocolBuffer.ProtocolMessage):
@@ -1310,6 +1323,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.OpenRequest'
class OpenResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -1374,6 +1388,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.OpenResponse'
class CloseRequest(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
@@ -1507,6 +1522,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.CloseRequest'
class CloseResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -1571,6 +1587,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.CloseResponse'
class FileStat(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
@@ -1848,6 +1865,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.FileStat'
class StatRequest(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
@@ -1976,6 +1994,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.StatRequest'
class StatResponse(ProtocolBuffer.ProtocolMessage):
has_more_files_found_ = 0
more_files_found_ = 0
@@ -2126,6 +2145,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.StatResponse'
class AppendRequest(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
@@ -2298,6 +2318,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.AppendRequest'
class AppendResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -2362,6 +2383,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.AppendResponse'
class DeleteRequest(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
@@ -2461,6 +2483,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.DeleteRequest'
class DeleteResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -2525,6 +2548,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.DeleteResponse'
class ReadRequest(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
@@ -2702,6 +2726,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.ReadRequest'
class ReadResponse(ProtocolBuffer.ProtocolMessage):
has_data_ = 0
data_ = ""
@@ -2801,6 +2826,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.ReadResponse'
class ReadKeyValueRequest(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
@@ -3012,6 +3038,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.ReadKeyValueRequest'
class ReadKeyValueResponse_KeyValue(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
@@ -3150,6 +3177,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.ReadKeyValueResponse_KeyValue'
class ReadKeyValueResponse(ProtocolBuffer.ProtocolMessage):
has_next_key_ = 0
next_key_ = ""
@@ -3331,16 +3359,607 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.ReadKeyValueResponse'
+class ShuffleEnums(ProtocolBuffer.ProtocolMessage):
+
+
+ CSV_INPUT = 0
+ RECORDS_KEY_VALUE_PROTO_INPUT = 1
+
+ _InputFormat_NAMES = {
+ 0: "CSV_INPUT",
+ 1: "RECORDS_KEY_VALUE_PROTO_INPUT",
+ }
+
+ def InputFormat_Name(cls, x): return cls._InputFormat_NAMES.get(x, "")
+ InputFormat_Name = classmethod(InputFormat_Name)
+
+
+
+ CSV_OUTPUT = 0
+ RECORDS_KEY_MULTI_VALUE_PROTO_OUTPUT = 1
+
+ _OutputFormat_NAMES = {
+ 0: "CSV_OUTPUT",
+ 1: "RECORDS_KEY_MULTI_VALUE_PROTO_OUTPUT",
+ }
+
+ def OutputFormat_Name(cls, x): return cls._OutputFormat_NAMES.get(x, "")
+ OutputFormat_Name = classmethod(OutputFormat_Name)
+
+
+
+ UNKNOWN = 1
+ RUNNING = 2
+ SUCCESS = 3
+ FAILURE = 4
+ INVALID_INPUT = 5
+ OUTPUT_ALREADY_EXISTS = 6
+ INCORRECT_SHUFFLE_SIZE_BYTES = 7
+
+ _Status_NAMES = {
+ 1: "UNKNOWN",
+ 2: "RUNNING",
+ 3: "SUCCESS",
+ 4: "FAILURE",
+ 5: "INVALID_INPUT",
+ 6: "OUTPUT_ALREADY_EXISTS",
+ 7: "INCORRECT_SHUFFLE_SIZE_BYTES",
+ }
+
+ def Status_Name(cls, x): return cls._Status_NAMES.get(x, "")
+ Status_Name = classmethod(Status_Name)
+
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n
+
+ def ByteSizePartial(self):
+ n = 0
+ return n
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def OutputPartial(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+
+
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.ShuffleEnums'
+class ShuffleInputSpecification(ProtocolBuffer.ProtocolMessage):
+ has_format_ = 0
+ format_ = 0
+ has_path_ = 0
+ path_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def format(self): return self.format_
+
+ def set_format(self, x):
+ self.has_format_ = 1
+ self.format_ = x
+
+ def clear_format(self):
+ if self.has_format_:
+ self.has_format_ = 0
+ self.format_ = 0
+
+ def has_format(self): return self.has_format_
+
+ def path(self): return self.path_
+
+ def set_path(self, x):
+ self.has_path_ = 1
+ self.path_ = x
+
+ def clear_path(self):
+ if self.has_path_:
+ self.has_path_ = 0
+ self.path_ = ""
+
+ def has_path(self): return self.has_path_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_format()): self.set_format(x.format())
+ if (x.has_path()): self.set_path(x.path())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_format_ != x.has_format_: return 0
+ if self.has_format_ and self.format_ != x.format_: return 0
+ if self.has_path_ != x.has_path_: return 0
+ if self.has_path_ and self.path_ != x.path_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_path_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: path not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_format_): n += 1 + self.lengthVarInt64(self.format_)
+ n += self.lengthString(len(self.path_))
+ return n + 1
+
+ def ByteSizePartial(self):
+ n = 0
+ if (self.has_format_): n += 1 + self.lengthVarInt64(self.format_)
+ if (self.has_path_):
+ n += 1
+ n += self.lengthString(len(self.path_))
+ return n
+
+ def Clear(self):
+ self.clear_format()
+ self.clear_path()
+
+ def OutputUnchecked(self, out):
+ if (self.has_format_):
+ out.putVarInt32(8)
+ out.putVarInt32(self.format_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.path_)
+
+ def OutputPartial(self, out):
+ if (self.has_format_):
+ out.putVarInt32(8)
+ out.putVarInt32(self.format_)
+ if (self.has_path_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.path_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_format(d.getVarInt32())
+ continue
+ if tt == 18:
+ self.set_path(d.getPrefixedString())
+ continue
+
+
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_format_: res+=prefix+("format: %s\n" % self.DebugFormatInt32(self.format_))
+ if self.has_path_: res+=prefix+("path: %s\n" % self.DebugFormatString(self.path_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kformat = 1
+ kpath = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "format",
+ 2: "path",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.ShuffleInputSpecification'
+class ShuffleOutputSpecification(ProtocolBuffer.ProtocolMessage):
+ has_format_ = 0
+ format_ = 0
+
+ def __init__(self, contents=None):
+ self.path_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def format(self): return self.format_
+
+ def set_format(self, x):
+ self.has_format_ = 1
+ self.format_ = x
+
+ def clear_format(self):
+ if self.has_format_:
+ self.has_format_ = 0
+ self.format_ = 0
+
+ def has_format(self): return self.has_format_
+
+ def path_size(self): return len(self.path_)
+ def path_list(self): return self.path_
+
+ def path(self, i):
+ return self.path_[i]
+
+ def set_path(self, i, x):
+ self.path_[i] = x
+
+ def add_path(self, x):
+ self.path_.append(x)
+
+ def clear_path(self):
+ self.path_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_format()): self.set_format(x.format())
+ for i in xrange(x.path_size()): self.add_path(x.path(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_format_ != x.has_format_: return 0
+ if self.has_format_ and self.format_ != x.format_: return 0
+ if len(self.path_) != len(x.path_): return 0
+ for e1, e2 in zip(self.path_, x.path_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_format_): n += 1 + self.lengthVarInt64(self.format_)
+ n += 1 * len(self.path_)
+ for i in xrange(len(self.path_)): n += self.lengthString(len(self.path_[i]))
+ return n
+
+ def ByteSizePartial(self):
+ n = 0
+ if (self.has_format_): n += 1 + self.lengthVarInt64(self.format_)
+ n += 1 * len(self.path_)
+ for i in xrange(len(self.path_)): n += self.lengthString(len(self.path_[i]))
+ return n
+
+ def Clear(self):
+ self.clear_format()
+ self.clear_path()
+
+ def OutputUnchecked(self, out):
+ if (self.has_format_):
+ out.putVarInt32(8)
+ out.putVarInt32(self.format_)
+ for i in xrange(len(self.path_)):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.path_[i])
+
+ def OutputPartial(self, out):
+ if (self.has_format_):
+ out.putVarInt32(8)
+ out.putVarInt32(self.format_)
+ for i in xrange(len(self.path_)):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.path_[i])
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_format(d.getVarInt32())
+ continue
+ if tt == 18:
+ self.add_path(d.getPrefixedString())
+ continue
+
+
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_format_: res+=prefix+("format: %s\n" % self.DebugFormatInt32(self.format_))
+ cnt=0
+ for e in self.path_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("path%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kformat = 1
+ kpath = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "format",
+ 2: "path",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.ShuffleOutputSpecification'
+class ShuffleRequest_Callback(ProtocolBuffer.ProtocolMessage):
+ has_url_ = 0
+ url_ = ""
+ has_app_version_id_ = 0
+ app_version_id_ = ""
+ has_method_ = 0
+ method_ = "POST"
+ has_queue_ = 0
+ queue_ = "default"
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def url(self): return self.url_
+
+ def set_url(self, x):
+ self.has_url_ = 1
+ self.url_ = x
+
+ def clear_url(self):
+ if self.has_url_:
+ self.has_url_ = 0
+ self.url_ = ""
+
+ def has_url(self): return self.has_url_
+
+ def app_version_id(self): return self.app_version_id_
+
+ def set_app_version_id(self, x):
+ self.has_app_version_id_ = 1
+ self.app_version_id_ = x
+
+ def clear_app_version_id(self):
+ if self.has_app_version_id_:
+ self.has_app_version_id_ = 0
+ self.app_version_id_ = ""
+
+ def has_app_version_id(self): return self.has_app_version_id_
+
+ def method(self): return self.method_
+
+ def set_method(self, x):
+ self.has_method_ = 1
+ self.method_ = x
+
+ def clear_method(self):
+ if self.has_method_:
+ self.has_method_ = 0
+ self.method_ = "POST"
+
+ def has_method(self): return self.has_method_
+
+ def queue(self): return self.queue_
+
+ def set_queue(self, x):
+ self.has_queue_ = 1
+ self.queue_ = x
+
+ def clear_queue(self):
+ if self.has_queue_:
+ self.has_queue_ = 0
+ self.queue_ = "default"
+
+ def has_queue(self): return self.has_queue_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_url()): self.set_url(x.url())
+ if (x.has_app_version_id()): self.set_app_version_id(x.app_version_id())
+ if (x.has_method()): self.set_method(x.method())
+ if (x.has_queue()): self.set_queue(x.queue())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_url_ != x.has_url_: return 0
+ if self.has_url_ and self.url_ != x.url_: return 0
+ if self.has_app_version_id_ != x.has_app_version_id_: return 0
+ if self.has_app_version_id_ and self.app_version_id_ != x.app_version_id_: return 0
+ if self.has_method_ != x.has_method_: return 0
+ if self.has_method_ and self.method_ != x.method_: return 0
+ if self.has_queue_ != x.has_queue_: return 0
+ if self.has_queue_ and self.queue_ != x.queue_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: url not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.url_))
+ if (self.has_app_version_id_): n += 1 + self.lengthString(len(self.app_version_id_))
+ if (self.has_method_): n += 1 + self.lengthString(len(self.method_))
+ if (self.has_queue_): n += 1 + self.lengthString(len(self.queue_))
+ return n + 1
+
+ def ByteSizePartial(self):
+ n = 0
+ if (self.has_url_):
+ n += 1
+ n += self.lengthString(len(self.url_))
+ if (self.has_app_version_id_): n += 1 + self.lengthString(len(self.app_version_id_))
+ if (self.has_method_): n += 1 + self.lengthString(len(self.method_))
+ if (self.has_queue_): n += 1 + self.lengthString(len(self.queue_))
+ return n
+
+ def Clear(self):
+ self.clear_url()
+ self.clear_app_version_id()
+ self.clear_method()
+ self.clear_queue()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.url_)
+ if (self.has_app_version_id_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.app_version_id_)
+ if (self.has_method_):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.method_)
+ if (self.has_queue_):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.queue_)
+
+ def OutputPartial(self, out):
+ if (self.has_url_):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.url_)
+ if (self.has_app_version_id_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.app_version_id_)
+ if (self.has_method_):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.method_)
+ if (self.has_queue_):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.queue_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_url(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_app_version_id(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_method(d.getPrefixedString())
+ continue
+ if tt == 34:
+ self.set_queue(d.getPrefixedString())
+ continue
+
+
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_url_: res+=prefix+("url: %s\n" % self.DebugFormatString(self.url_))
+ if self.has_app_version_id_: res+=prefix+("app_version_id: %s\n" % self.DebugFormatString(self.app_version_id_))
+ if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatString(self.method_))
+ if self.has_queue_: res+=prefix+("queue: %s\n" % self.DebugFormatString(self.queue_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kurl = 1
+ kapp_version_id = 2
+ kmethod = 3
+ kqueue = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "url",
+ 2: "app_version_id",
+ 3: "method",
+ 4: "queue",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.STRING,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.ShuffleRequest_Callback'
class ShuffleRequest(ProtocolBuffer.ProtocolMessage):
has_shuffle_name_ = 0
shuffle_name_ = ""
has_output_ = 0
has_shuffle_size_bytes_ = 0
shuffle_size_bytes_ = 0
+ has_callback_ = 0
def __init__(self, contents=None):
self.input_ = []
self.output_ = ShuffleOutputSpecification()
+ self.callback_ = ShuffleRequest_Callback()
if contents is not None: self.MergeFromString(contents)
def shuffle_name(self): return self.shuffle_name_
@@ -3393,6 +4012,14 @@
def has_shuffle_size_bytes(self): return self.has_shuffle_size_bytes_
+ def callback(self): return self.callback_
+
+ def mutable_callback(self): self.has_callback_ = 1; return self.callback_
+
+ def clear_callback(self):self.has_callback_ = 0; self.callback_.Clear()
+
+ def has_callback(self): return self.has_callback_
+
def MergeFrom(self, x):
assert x is not self
@@ -3400,6 +4027,7 @@
for i in xrange(x.input_size()): self.add_input().CopyFrom(x.input(i))
if (x.has_output()): self.mutable_output().MergeFrom(x.output())
if (x.has_shuffle_size_bytes()): self.set_shuffle_size_bytes(x.shuffle_size_bytes())
+ if (x.has_callback()): self.mutable_callback().MergeFrom(x.callback())
def Equals(self, x):
if x is self: return 1
@@ -3412,6 +4040,8 @@
if self.has_output_ and self.output_ != x.output_: return 0
if self.has_shuffle_size_bytes_ != x.has_shuffle_size_bytes_: return 0
if self.has_shuffle_size_bytes_ and self.shuffle_size_bytes_ != x.shuffle_size_bytes_: return 0
+ if self.has_callback_ != x.has_callback_: return 0
+ if self.has_callback_ and self.callback_ != x.callback_: return 0
return 1
def IsInitialized(self, debug_strs=None):
@@ -3431,6 +4061,11 @@
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: shuffle_size_bytes not set.')
+ if (not self.has_callback_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: callback not set.')
+ elif not self.callback_.IsInitialized(debug_strs): initialized = 0
return initialized
def ByteSize(self):
@@ -3440,7 +4075,8 @@
for i in xrange(len(self.input_)): n += self.lengthString(self.input_[i].ByteSize())
n += self.lengthString(self.output_.ByteSize())
n += self.lengthVarInt64(self.shuffle_size_bytes_)
- return n + 3
+ n += self.lengthString(self.callback_.ByteSize())
+ return n + 4
def ByteSizePartial(self):
n = 0
@@ -3455,6 +4091,9 @@
if (self.has_shuffle_size_bytes_):
n += 1
n += self.lengthVarInt64(self.shuffle_size_bytes_)
+ if (self.has_callback_):
+ n += 1
+ n += self.lengthString(self.callback_.ByteSizePartial())
return n
def Clear(self):
@@ -3462,6 +4101,7 @@
self.clear_input()
self.clear_output()
self.clear_shuffle_size_bytes()
+ self.clear_callback()
def OutputUnchecked(self, out):
out.putVarInt32(10)
@@ -3475,6 +4115,9 @@
self.output_.OutputUnchecked(out)
out.putVarInt32(32)
out.putVarInt64(self.shuffle_size_bytes_)
+ out.putVarInt32(42)
+ out.putVarInt32(self.callback_.ByteSize())
+ self.callback_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_shuffle_name_):
@@ -3491,6 +4134,10 @@
if (self.has_shuffle_size_bytes_):
out.putVarInt32(32)
out.putVarInt64(self.shuffle_size_bytes_)
+ if (self.has_callback_):
+ out.putVarInt32(42)
+ out.putVarInt32(self.callback_.ByteSizePartial())
+ self.callback_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
@@ -3513,6 +4160,12 @@
if tt == 32:
self.set_shuffle_size_bytes(d.getVarInt64())
continue
+ if tt == 42:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_callback().TryMerge(tmp)
+ continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -3535,6 +4188,10 @@
res+=self.output_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_shuffle_size_bytes_: res+=prefix+("shuffle_size_bytes: %s\n" % self.DebugFormatInt64(self.shuffle_size_bytes_))
+ if self.has_callback_:
+ res+=prefix+"callback <\n"
+ res+=self.callback_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
return res
@@ -3545,6 +4202,7 @@
kinput = 2
koutput = 3
kshuffle_size_bytes = 4
+ kcallback = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
@@ -3552,7 +4210,8 @@
2: "input",
3: "output",
4: "shuffle_size_bytes",
- }, 4)
+ 5: "callback",
+ }, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
@@ -3560,11 +4219,13 @@
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.NUMERIC,
- }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+ 5: ProtocolBuffer.Encoder.STRING,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.ShuffleRequest'
class ShuffleResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -3629,6 +4290,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.ShuffleResponse'
class GetShuffleStatusRequest(ProtocolBuffer.ProtocolMessage):
has_shuffle_name_ = 0
shuffle_name_ = ""
@@ -3728,20 +4390,26 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.GetShuffleStatusRequest'
class GetShuffleStatusResponse(ProtocolBuffer.ProtocolMessage):
has_status_ = 0
+ status_ = 0
has_description_ = 0
description_ = ""
def __init__(self, contents=None):
- self.status_ = Status()
if contents is not None: self.MergeFromString(contents)
def status(self): return self.status_
- def mutable_status(self): self.has_status_ = 1; return self.status_
+ def set_status(self, x):
+ self.has_status_ = 1
+ self.status_ = x
- def clear_status(self):self.has_status_ = 0; self.status_.Clear()
+ def clear_status(self):
+ if self.has_status_:
+ self.has_status_ = 0
+ self.status_ = 0
def has_status(self): return self.has_status_
@@ -3761,7 +4429,7 @@
def MergeFrom(self, x):
assert x is not self
- if (x.has_status()): self.mutable_status().MergeFrom(x.status())
+ if (x.has_status()): self.set_status(x.status())
if (x.has_description()): self.set_description(x.description())
def Equals(self, x):
@@ -3778,12 +4446,11 @@
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: status not set.')
- elif not self.status_.IsInitialized(debug_strs): initialized = 0
return initialized
def ByteSize(self):
n = 0
- n += self.lengthString(self.status_.ByteSize())
+ n += self.lengthVarInt64(self.status_)
if (self.has_description_): n += 1 + self.lengthString(len(self.description_))
return n + 1
@@ -3791,7 +4458,7 @@
n = 0
if (self.has_status_):
n += 1
- n += self.lengthString(self.status_.ByteSizePartial())
+ n += self.lengthVarInt64(self.status_)
if (self.has_description_): n += 1 + self.lengthString(len(self.description_))
return n
@@ -3800,18 +4467,16 @@
self.clear_description()
def OutputUnchecked(self, out):
- out.putVarInt32(10)
- out.putVarInt32(self.status_.ByteSize())
- self.status_.OutputUnchecked(out)
+ out.putVarInt32(8)
+ out.putVarInt32(self.status_)
if (self.has_description_):
out.putVarInt32(18)
out.putPrefixedString(self.description_)
def OutputPartial(self, out):
if (self.has_status_):
- out.putVarInt32(10)
- out.putVarInt32(self.status_.ByteSizePartial())
- self.status_.OutputPartial(out)
+ out.putVarInt32(8)
+ out.putVarInt32(self.status_)
if (self.has_description_):
out.putVarInt32(18)
out.putPrefixedString(self.description_)
@@ -3819,11 +4484,8 @@
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
- if tt == 10:
- length = d.getVarInt32()
- tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
- d.skip(length)
- self.mutable_status().TryMerge(tmp)
+ if tt == 8:
+ self.set_status(d.getVarInt32())
continue
if tt == 18:
self.set_description(d.getPrefixedString())
@@ -3836,10 +4498,7 @@
def __str__(self, prefix="", printElemNumber=0):
res=""
- if self.has_status_:
- res+=prefix+"status <\n"
- res+=self.status_.__str__(prefix + " ", printElemNumber)
- res+=prefix+">\n"
+ if self.has_status_: res+=prefix+("status: %s\n" % self.DebugFormatInt32(self.status_))
if self.has_description_: res+=prefix+("description: %s\n" % self.DebugFormatString(self.description_))
return res
@@ -3858,12 +4517,219 @@
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
- 1: ProtocolBuffer.Encoder.STRING,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.GetShuffleStatusResponse'
+class GetCapabilitiesRequest(ProtocolBuffer.ProtocolMessage):
-__all__ = ['FileServiceErrors','KeyValue','KeyValues','FileContentType','CreateRequest_Parameter','CreateRequest','CreateResponse','OpenRequest','OpenResponse','CloseRequest','CloseResponse','FileStat','StatRequest','StatResponse','AppendRequest','AppendResponse','DeleteRequest','DeleteResponse','ReadRequest','ReadResponse','ReadKeyValueRequest','ReadKeyValueResponse_KeyValue','ReadKeyValueResponse','ShuffleRequest','ShuffleResponse','GetShuffleStatusRequest','GetShuffleStatusResponse']
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n
+
+ def ByteSizePartial(self):
+ n = 0
+ return n
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def OutputPartial(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+
+
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.GetCapabilitiesRequest'
+class GetCapabilitiesResponse(ProtocolBuffer.ProtocolMessage):
+ has_shuffle_available_ = 0
+ shuffle_available_ = 0
+
+ def __init__(self, contents=None):
+ self.filesystem_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def filesystem_size(self): return len(self.filesystem_)
+ def filesystem_list(self): return self.filesystem_
+
+ def filesystem(self, i):
+ return self.filesystem_[i]
+
+ def set_filesystem(self, i, x):
+ self.filesystem_[i] = x
+
+ def add_filesystem(self, x):
+ self.filesystem_.append(x)
+
+ def clear_filesystem(self):
+ self.filesystem_ = []
+
+ def shuffle_available(self): return self.shuffle_available_
+
+ def set_shuffle_available(self, x):
+ self.has_shuffle_available_ = 1
+ self.shuffle_available_ = x
+
+ def clear_shuffle_available(self):
+ if self.has_shuffle_available_:
+ self.has_shuffle_available_ = 0
+ self.shuffle_available_ = 0
+
+ def has_shuffle_available(self): return self.has_shuffle_available_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.filesystem_size()): self.add_filesystem(x.filesystem(i))
+ if (x.has_shuffle_available()): self.set_shuffle_available(x.shuffle_available())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.filesystem_) != len(x.filesystem_): return 0
+ for e1, e2 in zip(self.filesystem_, x.filesystem_):
+ if e1 != e2: return 0
+ if self.has_shuffle_available_ != x.has_shuffle_available_: return 0
+ if self.has_shuffle_available_ and self.shuffle_available_ != x.shuffle_available_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_shuffle_available_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: shuffle_available not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.filesystem_)
+ for i in xrange(len(self.filesystem_)): n += self.lengthString(len(self.filesystem_[i]))
+ return n + 2
+
+ def ByteSizePartial(self):
+ n = 0
+ n += 1 * len(self.filesystem_)
+ for i in xrange(len(self.filesystem_)): n += self.lengthString(len(self.filesystem_[i]))
+ if (self.has_shuffle_available_):
+ n += 2
+ return n
+
+ def Clear(self):
+ self.clear_filesystem()
+ self.clear_shuffle_available()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.filesystem_)):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.filesystem_[i])
+ out.putVarInt32(16)
+ out.putBoolean(self.shuffle_available_)
+
+ def OutputPartial(self, out):
+ for i in xrange(len(self.filesystem_)):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.filesystem_[i])
+ if (self.has_shuffle_available_):
+ out.putVarInt32(16)
+ out.putBoolean(self.shuffle_available_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.add_filesystem(d.getPrefixedString())
+ continue
+ if tt == 16:
+ self.set_shuffle_available(d.getBoolean())
+ continue
+
+
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.filesystem_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("filesystem%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ if self.has_shuffle_available_: res+=prefix+("shuffle_available: %s\n" % self.DebugFormatBool(self.shuffle_available_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kfilesystem = 1
+ kshuffle_available = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "filesystem",
+ 2: "shuffle_available",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.files.GetCapabilitiesResponse'
+
+__all__ = ['FileServiceErrors','KeyValue','KeyValues','FileContentType','CreateRequest_Parameter','CreateRequest','CreateResponse','OpenRequest','OpenResponse','CloseRequest','CloseResponse','FileStat','StatRequest','StatResponse','AppendRequest','AppendResponse','DeleteRequest','DeleteResponse','ReadRequest','ReadResponse','ReadKeyValueRequest','ReadKeyValueResponse_KeyValue','ReadKeyValueResponse','ShuffleEnums','ShuffleInputSpecification','ShuffleOutputSpecification','ShuffleRequest_Callback','ShuffleRequest','ShuffleResponse','GetShuffleStatusRequest','GetShuffleStatusResponse','GetCapabilitiesRequest','GetCapabilitiesResponse']
diff --git a/google/appengine/api/files/file_service_stub.py b/google/appengine/api/files/file_service_stub.py
index 0448109..ef66ce9 100755
--- a/google/appengine/api/files/file_service_stub.py
+++ b/google/appengine/api/files/file_service_stub.py
@@ -324,3 +324,8 @@
raise_error(file_service_pb.FileServiceErrors.FILE_NOT_OPENED)
self.open_files[filename].append(request, response)
+
+ def _Dynamic_GetCapabilities(self, request, response):
+ """Handler for GetCapabilities RPC call."""
+ response.add_filesystem("blobstore")
+ response.set_shuffle_available(False)
diff --git a/google/appengine/api/files/shuffle_types_pb.py b/google/appengine/api/files/shuffle_types_pb.py
deleted file mode 100644
index 13f7737..0000000
--- a/google/appengine/api/files/shuffle_types_pb.py
+++ /dev/null
@@ -1,411 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2007 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-from google.net.proto import ProtocolBuffer
-import array
-import dummy_thread as thread
-
-__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
- unusednames=printElemNumber,debug_strs no-special"""
-
-class ShuffleEnums(ProtocolBuffer.ProtocolMessage):
-
-
- CSV_INPUT = 0
- SSTABLE_KEY_VALUE_PROTO_INPUT = 1
- RECORDS_KEY_VALUE_PROTO_INPUT = 2
-
- _InputFormat_NAMES = {
- 0: "CSV_INPUT",
- 1: "SSTABLE_KEY_VALUE_PROTO_INPUT",
- 2: "RECORDS_KEY_VALUE_PROTO_INPUT",
- }
-
- def InputFormat_Name(cls, x): return cls._InputFormat_NAMES.get(x, "")
- InputFormat_Name = classmethod(InputFormat_Name)
-
-
-
- CSV_OUTPUT = 0
- SSTABLE_MULTI_VALUE_PROTO_OUTPUT = 1
- RECORDS_KEY_MULTI_VALUE_PROTO_OUTPUT = 2
-
- _OutputFormat_NAMES = {
- 0: "CSV_OUTPUT",
- 1: "SSTABLE_MULTI_VALUE_PROTO_OUTPUT",
- 2: "RECORDS_KEY_MULTI_VALUE_PROTO_OUTPUT",
- }
-
- def OutputFormat_Name(cls, x): return cls._OutputFormat_NAMES.get(x, "")
- OutputFormat_Name = classmethod(OutputFormat_Name)
-
-
-
- UNKNOWN = 1
- RUNNING = 2
- SUCCESS = 3
- FAILURE = 4
- INVALID_INPUT = 5
- OUTPUT_ALREADY_EXISTS = 6
- INCORRECT_SHUFFLE_SIZE_BYTES = 7
-
- _Status_NAMES = {
- 1: "UNKNOWN",
- 2: "RUNNING",
- 3: "SUCCESS",
- 4: "FAILURE",
- 5: "INVALID_INPUT",
- 6: "OUTPUT_ALREADY_EXISTS",
- 7: "INCORRECT_SHUFFLE_SIZE_BYTES",
- }
-
- def Status_Name(cls, x): return cls._Status_NAMES.get(x, "")
- Status_Name = classmethod(Status_Name)
-
-
- def __init__(self, contents=None):
- pass
- if contents is not None: self.MergeFromString(contents)
-
-
- def MergeFrom(self, x):
- assert x is not self
-
- def Equals(self, x):
- if x is self: return 1
- return 1
-
- def IsInitialized(self, debug_strs=None):
- initialized = 1
- return initialized
-
- def ByteSize(self):
- n = 0
- return n
-
- def ByteSizePartial(self):
- n = 0
- return n
-
- def Clear(self):
- pass
-
- def OutputUnchecked(self, out):
- pass
-
- def OutputPartial(self, out):
- pass
-
- def TryMerge(self, d):
- while d.avail() > 0:
- tt = d.getVarInt32()
-
-
- if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
- d.skipData(tt)
-
-
- def __str__(self, prefix="", printElemNumber=0):
- res=""
- return res
-
-
- def _BuildTagLookupTable(sparse, maxtag, default=None):
- return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
-
- _TEXT = _BuildTagLookupTable({
- 0: "ErrorCode",
- }, 0)
-
- _TYPES = _BuildTagLookupTable({
- 0: ProtocolBuffer.Encoder.NUMERIC,
- }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
- _STYLE = """"""
- _STYLE_CONTENT_TYPE = """"""
-class ShuffleInputSpecification(ProtocolBuffer.ProtocolMessage):
- has_format_ = 0
- format_ = 0
- has_path_ = 0
- path_ = ""
-
- def __init__(self, contents=None):
- if contents is not None: self.MergeFromString(contents)
-
- def format(self): return self.format_
-
- def set_format(self, x):
- self.has_format_ = 1
- self.format_ = x
-
- def clear_format(self):
- if self.has_format_:
- self.has_format_ = 0
- self.format_ = 0
-
- def has_format(self): return self.has_format_
-
- def path(self): return self.path_
-
- def set_path(self, x):
- self.has_path_ = 1
- self.path_ = x
-
- def clear_path(self):
- if self.has_path_:
- self.has_path_ = 0
- self.path_ = ""
-
- def has_path(self): return self.has_path_
-
-
- def MergeFrom(self, x):
- assert x is not self
- if (x.has_format()): self.set_format(x.format())
- if (x.has_path()): self.set_path(x.path())
-
- def Equals(self, x):
- if x is self: return 1
- if self.has_format_ != x.has_format_: return 0
- if self.has_format_ and self.format_ != x.format_: return 0
- if self.has_path_ != x.has_path_: return 0
- if self.has_path_ and self.path_ != x.path_: return 0
- return 1
-
- def IsInitialized(self, debug_strs=None):
- initialized = 1
- if (not self.has_path_):
- initialized = 0
- if debug_strs is not None:
- debug_strs.append('Required field: path not set.')
- return initialized
-
- def ByteSize(self):
- n = 0
- if (self.has_format_): n += 1 + self.lengthVarInt64(self.format_)
- n += self.lengthString(len(self.path_))
- return n + 1
-
- def ByteSizePartial(self):
- n = 0
- if (self.has_format_): n += 1 + self.lengthVarInt64(self.format_)
- if (self.has_path_):
- n += 1
- n += self.lengthString(len(self.path_))
- return n
-
- def Clear(self):
- self.clear_format()
- self.clear_path()
-
- def OutputUnchecked(self, out):
- if (self.has_format_):
- out.putVarInt32(8)
- out.putVarInt32(self.format_)
- out.putVarInt32(18)
- out.putPrefixedString(self.path_)
-
- def OutputPartial(self, out):
- if (self.has_format_):
- out.putVarInt32(8)
- out.putVarInt32(self.format_)
- if (self.has_path_):
- out.putVarInt32(18)
- out.putPrefixedString(self.path_)
-
- def TryMerge(self, d):
- while d.avail() > 0:
- tt = d.getVarInt32()
- if tt == 8:
- self.set_format(d.getVarInt32())
- continue
- if tt == 18:
- self.set_path(d.getPrefixedString())
- continue
-
-
- if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
- d.skipData(tt)
-
-
- def __str__(self, prefix="", printElemNumber=0):
- res=""
- if self.has_format_: res+=prefix+("format: %s\n" % self.DebugFormatInt32(self.format_))
- if self.has_path_: res+=prefix+("path: %s\n" % self.DebugFormatString(self.path_))
- return res
-
-
- def _BuildTagLookupTable(sparse, maxtag, default=None):
- return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
- kformat = 1
- kpath = 2
-
- _TEXT = _BuildTagLookupTable({
- 0: "ErrorCode",
- 1: "format",
- 2: "path",
- }, 2)
-
- _TYPES = _BuildTagLookupTable({
- 0: ProtocolBuffer.Encoder.NUMERIC,
- 1: ProtocolBuffer.Encoder.NUMERIC,
- 2: ProtocolBuffer.Encoder.STRING,
- }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
- _STYLE = """"""
- _STYLE_CONTENT_TYPE = """"""
-class ShuffleOutputSpecification(ProtocolBuffer.ProtocolMessage):
- has_format_ = 0
- format_ = 0
- has_path_base_ = 0
- path_base_ = ""
-
- def __init__(self, contents=None):
- if contents is not None: self.MergeFromString(contents)
-
- def format(self): return self.format_
-
- def set_format(self, x):
- self.has_format_ = 1
- self.format_ = x
-
- def clear_format(self):
- if self.has_format_:
- self.has_format_ = 0
- self.format_ = 0
-
- def has_format(self): return self.has_format_
-
- def path_base(self): return self.path_base_
-
- def set_path_base(self, x):
- self.has_path_base_ = 1
- self.path_base_ = x
-
- def clear_path_base(self):
- if self.has_path_base_:
- self.has_path_base_ = 0
- self.path_base_ = ""
-
- def has_path_base(self): return self.has_path_base_
-
-
- def MergeFrom(self, x):
- assert x is not self
- if (x.has_format()): self.set_format(x.format())
- if (x.has_path_base()): self.set_path_base(x.path_base())
-
- def Equals(self, x):
- if x is self: return 1
- if self.has_format_ != x.has_format_: return 0
- if self.has_format_ and self.format_ != x.format_: return 0
- if self.has_path_base_ != x.has_path_base_: return 0
- if self.has_path_base_ and self.path_base_ != x.path_base_: return 0
- return 1
-
- def IsInitialized(self, debug_strs=None):
- initialized = 1
- if (not self.has_path_base_):
- initialized = 0
- if debug_strs is not None:
- debug_strs.append('Required field: path_base not set.')
- return initialized
-
- def ByteSize(self):
- n = 0
- if (self.has_format_): n += 1 + self.lengthVarInt64(self.format_)
- n += self.lengthString(len(self.path_base_))
- return n + 1
-
- def ByteSizePartial(self):
- n = 0
- if (self.has_format_): n += 1 + self.lengthVarInt64(self.format_)
- if (self.has_path_base_):
- n += 1
- n += self.lengthString(len(self.path_base_))
- return n
-
- def Clear(self):
- self.clear_format()
- self.clear_path_base()
-
- def OutputUnchecked(self, out):
- if (self.has_format_):
- out.putVarInt32(8)
- out.putVarInt32(self.format_)
- out.putVarInt32(18)
- out.putPrefixedString(self.path_base_)
-
- def OutputPartial(self, out):
- if (self.has_format_):
- out.putVarInt32(8)
- out.putVarInt32(self.format_)
- if (self.has_path_base_):
- out.putVarInt32(18)
- out.putPrefixedString(self.path_base_)
-
- def TryMerge(self, d):
- while d.avail() > 0:
- tt = d.getVarInt32()
- if tt == 8:
- self.set_format(d.getVarInt32())
- continue
- if tt == 18:
- self.set_path_base(d.getPrefixedString())
- continue
-
-
- if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
- d.skipData(tt)
-
-
- def __str__(self, prefix="", printElemNumber=0):
- res=""
- if self.has_format_: res+=prefix+("format: %s\n" % self.DebugFormatInt32(self.format_))
- if self.has_path_base_: res+=prefix+("path_base: %s\n" % self.DebugFormatString(self.path_base_))
- return res
-
-
- def _BuildTagLookupTable(sparse, maxtag, default=None):
- return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
- kformat = 1
- kpath_base = 2
-
- _TEXT = _BuildTagLookupTable({
- 0: "ErrorCode",
- 1: "format",
- 2: "path_base",
- }, 2)
-
- _TYPES = _BuildTagLookupTable({
- 0: ProtocolBuffer.Encoder.NUMERIC,
- 1: ProtocolBuffer.Encoder.NUMERIC,
- 2: ProtocolBuffer.Encoder.STRING,
- }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
-
-
- _STYLE = """"""
- _STYLE_CONTENT_TYPE = """"""
-
-__all__ = ['ShuffleEnums','ShuffleInputSpecification','ShuffleOutputSpecification']
diff --git a/google/appengine/api/images/images_service_pb.py b/google/appengine/api/images/images_service_pb.py
index 26851d7..745b57c 100644
--- a/google/appengine/api/images/images_service_pb.py
+++ b/google/appengine/api/images/images_service_pb.py
@@ -109,6 +109,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ImagesServiceError'
class ImagesServiceTransform(ProtocolBuffer.ProtocolMessage):
@@ -194,6 +195,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ImagesServiceTransform'
class Transform(ProtocolBuffer.ProtocolMessage):
has_width_ = 0
width_ = 0
@@ -696,6 +698,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.Transform'
class ImageData(ProtocolBuffer.ProtocolMessage):
has_content_ = 0
content_ = ""
@@ -829,6 +832,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ImageData'
class InputSettings(ProtocolBuffer.ProtocolMessage):
@@ -970,6 +974,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.InputSettings'
class OutputSettings(ProtocolBuffer.ProtocolMessage):
@@ -1113,6 +1118,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.OutputSettings'
class ImagesTransformRequest(ProtocolBuffer.ProtocolMessage):
has_image_ = 0
has_output_ = 0
@@ -1362,6 +1368,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ImagesTransformRequest'
class ImagesTransformResponse(ProtocolBuffer.ProtocolMessage):
has_image_ = 0
has_source_metadata_ = 0
@@ -1499,6 +1506,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ImagesTransformResponse'
class CompositeImageOptions(ProtocolBuffer.ProtocolMessage):
@@ -1779,6 +1787,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CompositeImageOptions'
class ImagesCanvas(ProtocolBuffer.ProtocolMessage):
has_width_ = 0
width_ = 0
@@ -1994,6 +2003,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ImagesCanvas'
class ImagesCompositeRequest(ProtocolBuffer.ProtocolMessage):
has_canvas_ = 0
@@ -2203,6 +2213,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ImagesCompositeRequest'
class ImagesCompositeResponse(ProtocolBuffer.ProtocolMessage):
has_image_ = 0
@@ -2306,6 +2317,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ImagesCompositeResponse'
class ImagesHistogramRequest(ProtocolBuffer.ProtocolMessage):
has_image_ = 0
@@ -2409,6 +2421,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ImagesHistogramRequest'
class ImagesHistogram(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -2598,6 +2611,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ImagesHistogram'
class ImagesHistogramResponse(ProtocolBuffer.ProtocolMessage):
has_histogram_ = 0
@@ -2701,6 +2715,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ImagesHistogramResponse'
class ImagesGetUrlBaseRequest(ProtocolBuffer.ProtocolMessage):
has_blob_key_ = 0
blob_key_ = ""
@@ -2800,6 +2815,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ImagesGetUrlBaseRequest'
class ImagesGetUrlBaseResponse(ProtocolBuffer.ProtocolMessage):
has_url_ = 0
url_ = ""
@@ -2899,5 +2915,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ImagesGetUrlBaseResponse'
__all__ = ['ImagesServiceError','ImagesServiceTransform','Transform','ImageData','InputSettings','OutputSettings','ImagesTransformRequest','ImagesTransformResponse','CompositeImageOptions','ImagesCanvas','ImagesCompositeRequest','ImagesCompositeResponse','ImagesHistogramRequest','ImagesHistogram','ImagesHistogramResponse','ImagesGetUrlBaseRequest','ImagesGetUrlBaseResponse']
diff --git a/google/appengine/api/logservice/log_service_pb.py b/google/appengine/api/logservice/log_service_pb.py
index a5eb9d2..0d8f7b0 100755
--- a/google/appengine/api/logservice/log_service_pb.py
+++ b/google/appengine/api/logservice/log_service_pb.py
@@ -46,6 +46,477 @@
from google.appengine.api.api_base_pb import *
import google.appengine.api.api_base_pb
+class LogServiceError(ProtocolBuffer.ProtocolMessage):
+
+
+ OK = 0
+ INVALID_REQUEST = 1
+ STORAGE_ERROR = 2
+
+ _ErrorCode_NAMES = {
+ 0: "OK",
+ 1: "INVALID_REQUEST",
+ 2: "STORAGE_ERROR",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ if _net_proto___parse__python is not None:
+ def _CMergeFromString(self, s):
+ _net_proto___parse__python.MergeFromString(self, 'apphosting.LogServiceError', s)
+
+ if _net_proto___parse__python is not None:
+ def _CEncode(self):
+ return _net_proto___parse__python.Encode(self, 'apphosting.LogServiceError')
+
+ if _net_proto___parse__python is not None:
+ def _CEncodePartial(self):
+ return _net_proto___parse__python.EncodePartial(self, 'apphosting.LogServiceError')
+
+ if _net_proto___parse__python is not None:
+ def _CToASCII(self, output_format):
+ return _net_proto___parse__python.ToASCII(self, 'apphosting.LogServiceError', output_format)
+
+
+ if _net_proto___parse__python is not None:
+ def ParseASCII(self, s):
+ _net_proto___parse__python.ParseASCII(self, 'apphosting.LogServiceError', s)
+
+
+ if _net_proto___parse__python is not None:
+ def ParseASCIIIgnoreUnknown(self, s):
+ _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.LogServiceError', s)
+
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n
+
+ def ByteSizePartial(self):
+ n = 0
+ return n
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def OutputPartial(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+
+
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.LogServiceError'
+ _SERIALIZED_DESCRIPTOR = array.array('B')
+ _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChphcHBob3N0aW5nLkxvZ1NlcnZpY2VFcnJvcnN6CUVycm9yQ29kZYsBkgECT0uYAQCMAYsBkgEPSU5WQUxJRF9SRVFVRVNUmAEBjAGLAZIBDVNUT1JBR0VfRVJST1KYAQKMAXS6AdkOCithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvEgphcHBob3N0aW5nGh1hcHBob3N0aW5nL2FwaS9hcGlfYmFzZS5wcm90byJOCg9Mb2dTZXJ2aWNlRXJyb3IiOwoJRXJyb3JDb2RlEgYKAk9LEAASEwoPSU5WQUxJRF9SRVFVRVNUEAESEQoNU1RPUkFHRV9FUlJPUhACIkgKDlVzZXJBcHBMb2dMaW5lEhYKDnRpbWVzdGFtcF91c2VjGAEgAigDEg0KBWxldmVsGAIgAigDEg8KB21lc3NhZ2UYAyACKAkiPwoPVXNlckFwcExvZ0dyb3VwEiwKCGxvZ19saW5lGAIgAygLMhouYXBwaG9zdGluZy5Vc2VyQXBwTG9nTGluZSIcCgxGbHVzaFJlcXVlc3QSDAoEbG9ncxgBIAEoDCIiChBTZXRTdGF0dXNSZXF1ZXN0Eg4KBnN0YXR1cxgBIAIoCSIfCglMb2dPZmZzZXQSEgoKcmVxdWVzdF9pZBgBIAEoCSI7CgdMb2dMaW5lEgwKBHRpbWUYASACKAMSDQoFbGV2ZWwYAiACKAUSEwoLbG9nX21lc3NhZ2UYAyACKAki1wUKClJlcXVlc3RMb2cSDgoGYXBwX2lkGAEgAigJEhIKCnZlcnNpb25faWQYAiACKAkSEgoKcmVxdWVzdF9pZBgDIAIoCRIKCgJpcBgEIAIoCRIQCghuaWNrbmFtZRgFIAEoCRISCgpzdGFydF90aW1lGAYgAigDEhAKCGVuZF90aW1lGAcgAigDEg8KB2xhdGVuY3kYCCACKAMSDwoHbWN5Y2xlcxgJIAIoAxIOCgZtZXRob2QYCiACKAkSEAoIcmVzb3VyY2UYCyACKAkSFAoMaHR0cF92ZXJzaW9uGAwgAigJEg4KBnN0YXR1cxgNIAIoBRIVCg1yZXNwb25zZV9zaXplGA4gAigDEhAKCHJlZmVycmVyGA8gASgJEhIKCnVzZXJfYWdlbnQYECABKAkSFQoNdXJsX21hcF9lbnRyeRgRIAIoCRIQCghjb21iaW5lZBgSIAIoCRITCgthcGlfbWN5Y2xlcxgTIAEoAxIMCgRob3N0GBQgASgJEgwKBGNvc3QYFSABKAESFwoPdGFza19xdWV1ZV9uYW1lGBYgASgJEhEKCXRhc2tfbmFtZRgXIAEoCRIbChN3YXNfbG9hZGluZ19yZXF1ZXN0GBggASgIEhQKDHBlbmRpbmdfdGltZRgZIAEoAxIZCg1yZXBsaWNhX2luZGV4GBogASgFOgItMRIWCghmaW5pc2hlZBgbIAEoCDoEdHJ1ZRIRCgljbG9uZV9rZXkYHCABKAwSIQoEbGluZRgdIAMoCzITLmFwcGhvc3RpbmcuTG9nTGluZRITCgtleGl0X3JlYXNvbhgeIAEoBRIeChZ3YXNfdGhyb3R0bGVkX2Zvcl90aW1lGB8gASgIEiIKGndhc190aHJvdHRsZWRfZm9yX3JlcXVlc3RzGCAgASgIEhYKDnRocm90dGxlZF90aW1lGCEgASgDEhMKC3NlcnZlcl9uYW1lGCIgASgMIrgCCg5Mb2dSZWFkUmVxdWVzdBIOCgZhcHBfaWQYASACKAkSEgoKdmVyc2lvbl9pZBgCIAMoCRISCgpzdGFydF90aW1lGAMgASgDEhAKCGVuZF90aW1lGAQgASgDEiUKBm9mZnNldBgFIAEoCzI
VLmFwcGhvc3RpbmcuTG9nT2Zmc2V0EhIKCnJlcXVlc3RfaWQYBiADKAkSGQoRbWluaW11bV9sb2dfbGV2ZWwYByABKAUSGgoSaW5jbHVkZV9pbmNvbXBsZXRlGAggASgIEg0KBWNvdW50GAkgASgDEhgKEGluY2x1ZGVfYXBwX2xvZ3MYCiABKAgSFAoMaW5jbHVkZV9ob3N0GAsgASgIEhMKC2luY2x1ZGVfYWxsGAwgASgIEhYKDmNhY2hlX2l0ZXJhdG9yGA0gASgIIl0KD0xvZ1JlYWRSZXNwb25zZRIjCgNsb2cYASADKAsyFi5hcHBob3N0aW5nLlJlcXVlc3RMb2cSJQoGb2Zmc2V0GAIgASgLMhUuYXBwaG9zdGluZy5Mb2dPZmZzZXQy0wEKCkxvZ1NlcnZpY2USPQoFRmx1c2gSGC5hcHBob3N0aW5nLkZsdXNoUmVxdWVzdBoaLmFwcGhvc3RpbmcuYmFzZS5Wb2lkUHJvdG8SRQoJU2V0U3RhdHVzEhwuYXBwaG9zdGluZy5TZXRTdGF0dXNSZXF1ZXN0GhouYXBwaG9zdGluZy5iYXNlLlZvaWRQcm90bxI/CgRSZWFkEhouYXBwaG9zdGluZy5Mb2dSZWFkUmVxdWVzdBobLmFwcGhvc3RpbmcuTG9nUmVhZFJlc3BvbnNlQjoKJGNvbS5nb29nbGUuYXBwaG9zdGluZy5hcGkubG9nc2VydmljZRABIAEoAUIMTG9nU2VydmljZVBi"))
+ if _net_proto___parse__python is not None:
+ _net_proto___parse__python.RegisterType(
+ _SERIALIZED_DESCRIPTOR.tostring())
+
+class UserAppLogLine(ProtocolBuffer.ProtocolMessage):
+ has_timestamp_usec_ = 0
+ timestamp_usec_ = 0
+ has_level_ = 0
+ level_ = 0
+ has_message_ = 0
+ message_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def timestamp_usec(self): return self.timestamp_usec_
+
+ def set_timestamp_usec(self, x):
+ self.has_timestamp_usec_ = 1
+ self.timestamp_usec_ = x
+
+ def clear_timestamp_usec(self):
+ if self.has_timestamp_usec_:
+ self.has_timestamp_usec_ = 0
+ self.timestamp_usec_ = 0
+
+ def has_timestamp_usec(self): return self.has_timestamp_usec_
+
+ def level(self): return self.level_
+
+ def set_level(self, x):
+ self.has_level_ = 1
+ self.level_ = x
+
+ def clear_level(self):
+ if self.has_level_:
+ self.has_level_ = 0
+ self.level_ = 0
+
+ def has_level(self): return self.has_level_
+
+ def message(self): return self.message_
+
+ def set_message(self, x):
+ self.has_message_ = 1
+ self.message_ = x
+
+ def clear_message(self):
+ if self.has_message_:
+ self.has_message_ = 0
+ self.message_ = ""
+
+ def has_message(self): return self.has_message_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_timestamp_usec()): self.set_timestamp_usec(x.timestamp_usec())
+ if (x.has_level()): self.set_level(x.level())
+ if (x.has_message()): self.set_message(x.message())
+
+ if _net_proto___parse__python is not None:
+ def _CMergeFromString(self, s):
+ _net_proto___parse__python.MergeFromString(self, 'apphosting.UserAppLogLine', s)
+
+ if _net_proto___parse__python is not None:
+ def _CEncode(self):
+ return _net_proto___parse__python.Encode(self, 'apphosting.UserAppLogLine')
+
+ if _net_proto___parse__python is not None:
+ def _CEncodePartial(self):
+ return _net_proto___parse__python.EncodePartial(self, 'apphosting.UserAppLogLine')
+
+ if _net_proto___parse__python is not None:
+ def _CToASCII(self, output_format):
+ return _net_proto___parse__python.ToASCII(self, 'apphosting.UserAppLogLine', output_format)
+
+
+ if _net_proto___parse__python is not None:
+ def ParseASCII(self, s):
+ _net_proto___parse__python.ParseASCII(self, 'apphosting.UserAppLogLine', s)
+
+
+ if _net_proto___parse__python is not None:
+ def ParseASCIIIgnoreUnknown(self, s):
+ _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.UserAppLogLine', s)
+
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_timestamp_usec_ != x.has_timestamp_usec_: return 0
+ if self.has_timestamp_usec_ and self.timestamp_usec_ != x.timestamp_usec_: return 0
+ if self.has_level_ != x.has_level_: return 0
+ if self.has_level_ and self.level_ != x.level_: return 0
+ if self.has_message_ != x.has_message_: return 0
+ if self.has_message_ and self.message_ != x.message_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_timestamp_usec_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: timestamp_usec not set.')
+ if (not self.has_level_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: level not set.')
+ if (not self.has_message_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: message not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.timestamp_usec_)
+ n += self.lengthVarInt64(self.level_)
+ n += self.lengthString(len(self.message_))
+ return n + 3
+
+ def ByteSizePartial(self):
+ n = 0
+ if (self.has_timestamp_usec_):
+ n += 1
+ n += self.lengthVarInt64(self.timestamp_usec_)
+ if (self.has_level_):
+ n += 1
+ n += self.lengthVarInt64(self.level_)
+ if (self.has_message_):
+ n += 1
+ n += self.lengthString(len(self.message_))
+ return n
+
+ def Clear(self):
+ self.clear_timestamp_usec()
+ self.clear_level()
+ self.clear_message()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt64(self.timestamp_usec_)
+ out.putVarInt32(16)
+ out.putVarInt64(self.level_)
+ out.putVarInt32(26)
+ out.putPrefixedString(self.message_)
+
+ def OutputPartial(self, out):
+ if (self.has_timestamp_usec_):
+ out.putVarInt32(8)
+ out.putVarInt64(self.timestamp_usec_)
+ if (self.has_level_):
+ out.putVarInt32(16)
+ out.putVarInt64(self.level_)
+ if (self.has_message_):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.message_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_timestamp_usec(d.getVarInt64())
+ continue
+ if tt == 16:
+ self.set_level(d.getVarInt64())
+ continue
+ if tt == 26:
+ self.set_message(d.getPrefixedString())
+ continue
+
+
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_timestamp_usec_: res+=prefix+("timestamp_usec: %s\n" % self.DebugFormatInt64(self.timestamp_usec_))
+ if self.has_level_: res+=prefix+("level: %s\n" % self.DebugFormatInt64(self.level_))
+ if self.has_message_: res+=prefix+("message: %s\n" % self.DebugFormatString(self.message_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ ktimestamp_usec = 1
+ klevel = 2
+ kmessage = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "timestamp_usec",
+ 2: "level",
+ 3: "message",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.STRING,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.UserAppLogLine'
+ _SERIALIZED_DESCRIPTOR = array.array('B')
+ _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChlhcHBob3N0aW5nLlVzZXJBcHBMb2dMaW5lExoOdGltZXN0YW1wX3VzZWMgASgAMAM4AhQTGgVsZXZlbCACKAAwAzgCFBMaB21lc3NhZ2UgAygCMAk4AhTCARphcHBob3N0aW5nLkxvZ1NlcnZpY2VFcnJvcg=="))
+ if _net_proto___parse__python is not None:
+ _net_proto___parse__python.RegisterType(
+ _SERIALIZED_DESCRIPTOR.tostring())
+
+class UserAppLogGroup(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.log_line_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def log_line_size(self): return len(self.log_line_)
+ def log_line_list(self): return self.log_line_
+
+ def log_line(self, i):
+ return self.log_line_[i]
+
+ def mutable_log_line(self, i):
+ return self.log_line_[i]
+
+ def add_log_line(self):
+ x = UserAppLogLine()
+ self.log_line_.append(x)
+ return x
+
+ def clear_log_line(self):
+ self.log_line_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.log_line_size()): self.add_log_line().CopyFrom(x.log_line(i))
+
+ if _net_proto___parse__python is not None:
+ def _CMergeFromString(self, s):
+ _net_proto___parse__python.MergeFromString(self, 'apphosting.UserAppLogGroup', s)
+
+ if _net_proto___parse__python is not None:
+ def _CEncode(self):
+ return _net_proto___parse__python.Encode(self, 'apphosting.UserAppLogGroup')
+
+ if _net_proto___parse__python is not None:
+ def _CEncodePartial(self):
+ return _net_proto___parse__python.EncodePartial(self, 'apphosting.UserAppLogGroup')
+
+ if _net_proto___parse__python is not None:
+ def _CToASCII(self, output_format):
+ return _net_proto___parse__python.ToASCII(self, 'apphosting.UserAppLogGroup', output_format)
+
+
+ if _net_proto___parse__python is not None:
+ def ParseASCII(self, s):
+ _net_proto___parse__python.ParseASCII(self, 'apphosting.UserAppLogGroup', s)
+
+
+ if _net_proto___parse__python is not None:
+ def ParseASCIIIgnoreUnknown(self, s):
+ _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'apphosting.UserAppLogGroup', s)
+
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.log_line_) != len(x.log_line_): return 0
+ for e1, e2 in zip(self.log_line_, x.log_line_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.log_line_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.log_line_)
+ for i in xrange(len(self.log_line_)): n += self.lengthString(self.log_line_[i].ByteSize())
+ return n
+
+ def ByteSizePartial(self):
+ n = 0
+ n += 1 * len(self.log_line_)
+ for i in xrange(len(self.log_line_)): n += self.lengthString(self.log_line_[i].ByteSizePartial())
+ return n
+
+ def Clear(self):
+ self.clear_log_line()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.log_line_)):
+ out.putVarInt32(18)
+ out.putVarInt32(self.log_line_[i].ByteSize())
+ self.log_line_[i].OutputUnchecked(out)
+
+ def OutputPartial(self, out):
+ for i in xrange(len(self.log_line_)):
+ out.putVarInt32(18)
+ out.putVarInt32(self.log_line_[i].ByteSizePartial())
+ self.log_line_[i].OutputPartial(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_log_line().TryMerge(tmp)
+ continue
+
+
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.log_line_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("log_line%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ klog_line = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 2: "log_line",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.UserAppLogGroup'
+ _SERIALIZED_DESCRIPTOR = array.array('B')
+ _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChphcHBob3N0aW5nLlVzZXJBcHBMb2dHcm91cBMaCGxvZ19saW5lIAIoAjALOANKGWFwcGhvc3RpbmcuVXNlckFwcExvZ0xpbmUUwgEaYXBwaG9zdGluZy5Mb2dTZXJ2aWNlRXJyb3I="))
+ if _net_proto___parse__python is not None:
+ _net_proto___parse__python.RegisterType(
+ _SERIALIZED_DESCRIPTOR.tostring())
+
class FlushRequest(ProtocolBuffer.ProtocolMessage):
has_logs_ = 0
logs_ = ""
@@ -167,8 +638,9 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.FlushRequest'
_SERIALIZED_DESCRIPTOR = array.array('B')
- _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChdhcHBob3N0aW5nLkZsdXNoUmVxdWVzdBMaBGxvZ3MgASgCMAk4ARS6Ab0MCithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvEgphcHBob3N0aW5nGh1hcHBob3N0aW5nL2FwaS9hcGlfYmFzZS5wcm90byIcCgxGbHVzaFJlcXVlc3QSDAoEbG9ncxgBIAEoDCIiChBTZXRTdGF0dXNSZXF1ZXN0Eg4KBnN0YXR1cxgBIAIoCSIfCglMb2dPZmZzZXQSEgoKcmVxdWVzdF9pZBgBIAEoCSI7CgdMb2dMaW5lEgwKBHRpbWUYASACKAMSDQoFbGV2ZWwYAiACKAUSEwoLbG9nX21lc3NhZ2UYAyACKAki1wUKClJlcXVlc3RMb2cSDgoGYXBwX2lkGAEgAigJEhIKCnZlcnNpb25faWQYAiACKAkSEgoKcmVxdWVzdF9pZBgDIAIoCRIKCgJpcBgEIAIoCRIQCghuaWNrbmFtZRgFIAEoCRISCgpzdGFydF90aW1lGAYgAigDEhAKCGVuZF90aW1lGAcgAigDEg8KB2xhdGVuY3kYCCACKAMSDwoHbWN5Y2xlcxgJIAIoAxIOCgZtZXRob2QYCiACKAkSEAoIcmVzb3VyY2UYCyACKAkSFAoMaHR0cF92ZXJzaW9uGAwgAigJEg4KBnN0YXR1cxgNIAIoBRIVCg1yZXNwb25zZV9zaXplGA4gAigDEhAKCHJlZmVycmVyGA8gASgJEhIKCnVzZXJfYWdlbnQYECABKAkSFQoNdXJsX21hcF9lbnRyeRgRIAIoCRIQCghjb21iaW5lZBgSIAIoCRITCgthcGlfbWN5Y2xlcxgTIAEoAxIMCgRob3N0GBQgASgJEgwKBGNvc3QYFSABKAESFwoPdGFza19xdWV1ZV9uYW1lGBYgASgJEhEKCXRhc2tfbmFtZRgXIAEoCRIbChN3YXNfbG9hZGluZ19yZXF1ZXN0GBggASgIEhQKDHBlbmRpbmdfdGltZRgZIAEoAxIZCg1yZXBsaWNhX2luZGV4GBogASgFOgItMRIWCghmaW5pc2hlZBgbIAEoCDoEdHJ1ZRIRCgljbG9uZV9rZXkYHCABKAwSIQoEbGluZRgdIAMoCzITLmFwcGhvc3RpbmcuTG9nTGluZRITCgtleGl0X3JlYXNvbhgeIAEoBRIeChZ3YXNfdGhyb3R0bGVkX2Zvcl90aW1lGB8gASgIEiIKGndhc190aHJvdHRsZWRfZm9yX3JlcXVlc3RzGCAgASgIEhYKDnRocm90dGxlZF90aW1lGCEgASgDEhMKC3NlcnZlcl9uYW1lGCIgASgMIrgCCg5Mb2dSZWFkUmVxdWVzdBIOCgZhcHBfaWQYASACKAkSEgoKdmVyc2lvbl9pZBgCIAMoCRISCgpzdGFydF90aW1lGAMgASgDEhAKCGVuZF90aW1lGAQgASgDEiUKBm9mZnNldBgFIAEoCzIVLmFwcGhvc3RpbmcuTG9nT2Zmc2V0EhIKCnJlcXVlc3RfaWQYBiADKAkSGQoRbWluaW11bV9sb2dfbGV2ZWwYByABKAUSGgoSaW5jbHVkZV9pbmNvbXBsZXRlGAggASgIEg0KBWNvdW50GAkgASgDEhgKEGluY2x1ZGVfYXBwX2xvZ3MYCiABKAgSFAoMaW5jbHVkZV9ob3N0GAsgASgIEhMKC2luY2x1ZGVfYWxsGAwgASgIEhYKDmNhY2hlX2l0ZXJhdG9yGA0gASgIIl0KD0xvZ1JlYWRSZXNwb25zZRIjCgNsb2cYASADKAsyFi5hcHBob3N0aW5nLlJlcXVlc3RMb2cSJQoGb2Zmc2V0GAIgASgLMhU
uYXBwaG9zdGluZy5Mb2dPZmZzZXQykgEKCkxvZ1NlcnZpY2USPQoFRmx1c2gSGC5hcHBob3N0aW5nLkZsdXNoUmVxdWVzdBoaLmFwcGhvc3RpbmcuYmFzZS5Wb2lkUHJvdG8SRQoJU2V0U3RhdHVzEhwuYXBwaG9zdGluZy5TZXRTdGF0dXNSZXF1ZXN0GhouYXBwaG9zdGluZy5iYXNlLlZvaWRQcm90b0I6CiRjb20uZ29vZ2xlLmFwcGhvc3RpbmcuYXBpLmxvZ3NlcnZpY2UQASABKAFCDExvZ1NlcnZpY2VQYg=="))
+ _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChdhcHBob3N0aW5nLkZsdXNoUmVxdWVzdBMaBGxvZ3MgASgCMAk4ARTCARphcHBob3N0aW5nLkxvZ1NlcnZpY2VFcnJvcg=="))
if _net_proto___parse__python is not None:
_net_proto___parse__python.RegisterType(
_SERIALIZED_DESCRIPTOR.tostring())
@@ -299,8 +771,9 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.SetStatusRequest'
_SERIALIZED_DESCRIPTOR = array.array('B')
- _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChthcHBob3N0aW5nLlNldFN0YXR1c1JlcXVlc3QTGgZzdGF0dXMgASgCMAk4AhTCARdhcHBob3N0aW5nLkZsdXNoUmVxdWVzdA=="))
+ _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChthcHBob3N0aW5nLlNldFN0YXR1c1JlcXVlc3QTGgZzdGF0dXMgASgCMAk4AhTCARphcHBob3N0aW5nLkxvZ1NlcnZpY2VFcnJvcg=="))
if _net_proto___parse__python is not None:
_net_proto___parse__python.RegisterType(
_SERIALIZED_DESCRIPTOR.tostring())
@@ -426,8 +899,9 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.LogOffset'
_SERIALIZED_DESCRIPTOR = array.array('B')
- _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChRhcHBob3N0aW5nLkxvZ09mZnNldBMaCnJlcXVlc3RfaWQgASgCMAk4ARTCARdhcHBob3N0aW5nLkZsdXNoUmVxdWVzdA=="))
+ _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChRhcHBob3N0aW5nLkxvZ09mZnNldBMaCnJlcXVlc3RfaWQgASgCMAk4ARTCARphcHBob3N0aW5nLkxvZ1NlcnZpY2VFcnJvcg=="))
if _net_proto___parse__python is not None:
_net_proto___parse__python.RegisterType(
_SERIALIZED_DESCRIPTOR.tostring())
@@ -636,8 +1110,9 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.LogLine'
_SERIALIZED_DESCRIPTOR = array.array('B')
- _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChJhcHBob3N0aW5nLkxvZ0xpbmUTGgR0aW1lIAEoADADOAIUExoFbGV2ZWwgAigAMAU4AhQTGgtsb2dfbWVzc2FnZSADKAIwCTgCFMIBF2FwcGhvc3RpbmcuRmx1c2hSZXF1ZXN0"))
+ _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChJhcHBob3N0aW5nLkxvZ0xpbmUTGgR0aW1lIAEoADADOAIUExoFbGV2ZWwgAigAMAU4AhQTGgtsb2dfbWVzc2FnZSADKAIwCTgCFMIBGmFwcGhvc3RpbmcuTG9nU2VydmljZUVycm9y"))
if _net_proto___parse__python is not None:
_net_proto___parse__python.RegisterType(
_SERIALIZED_DESCRIPTOR.tostring())
@@ -1979,8 +2454,9 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.RequestLog'
_SERIALIZED_DESCRIPTOR = array.array('B')
- _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChVhcHBob3N0aW5nLlJlcXVlc3RMb2cTGgZhcHBfaWQgASgCMAk4AhQTGgp2ZXJzaW9uX2lkIAIoAjAJOAIUExoKcmVxdWVzdF9pZCADKAIwCTgCFBMaAmlwIAQoAjAJOAIUExoIbmlja25hbWUgBSgCMAk4ARQTGgpzdGFydF90aW1lIAYoADADOAIUExoIZW5kX3RpbWUgBygAMAM4AhQTGgdsYXRlbmN5IAgoADADOAIUExoHbWN5Y2xlcyAJKAAwAzgCFBMaBm1ldGhvZCAKKAIwCTgCFBMaCHJlc291cmNlIAsoAjAJOAIUExoMaHR0cF92ZXJzaW9uIAwoAjAJOAIUExoGc3RhdHVzIA0oADAFOAIUExoNcmVzcG9uc2Vfc2l6ZSAOKAAwAzgCFBMaCHJlZmVycmVyIA8oAjAJOAEUExoKdXNlcl9hZ2VudCAQKAIwCTgBFBMaDXVybF9tYXBfZW50cnkgESgCMAk4AhQTGghjb21iaW5lZCASKAIwCTgCFBMaC2FwaV9tY3ljbGVzIBMoADADOAEUExoEaG9zdCAUKAIwCTgBFBMaBGNvc3QgFSgBMAE4ARQTGg90YXNrX3F1ZXVlX25hbWUgFigCMAk4ARQTGgl0YXNrX25hbWUgFygCMAk4ARQTGhN3YXNfbG9hZGluZ19yZXF1ZXN0IBgoADAIOAEUExoMcGVuZGluZ190aW1lIBkoADADOAEUExoNcmVwbGljYV9pbmRleCAaKAAwBTgBQgItMaMBqgEHZGVmYXVsdLIBAi0xpAEUExoIZmluaXNoZWQgGygAMAg4AUIEdHJ1ZaMBqgEHZGVmYXVsdLIBBHRydWWkARQTGgljbG9uZV9rZXkgHCgCMAk4ARQTGgRsaW5lIB0oAjALOANKEmFwcGhvc3RpbmcuTG9nTGluZRQTGgtleGl0X3JlYXNvbiAeKAAwBTgBFBMaFndhc190aHJvdHRsZWRfZm9yX3RpbWUgHygAMAg4ARQTGhp3YXNfdGhyb3R0bGVkX2Zvcl9yZXF1ZXN0cyAgKAAwCDgBFBMaDnRocm90dGxlZF90aW1lICEoADADOAEUExoLc2VydmVyX25hbWUgIigCMAk4ARTCARdhcHBob3N0aW5nLkZsdXNoUmVxdWVzdA=="))
+ _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChVhcHBob3N0aW5nLlJlcXVlc3RMb2cTGgZhcHBfaWQgASgCMAk4AhQTGgp2ZXJzaW9uX2lkIAIoAjAJOAIUExoKcmVxdWVzdF9pZCADKAIwCTgCFBMaAmlwIAQoAjAJOAIUExoIbmlja25hbWUgBSgCMAk4ARQTGgpzdGFydF90aW1lIAYoADADOAIUExoIZW5kX3RpbWUgBygAMAM4AhQTGgdsYXRlbmN5IAgoADADOAIUExoHbWN5Y2xlcyAJKAAwAzgCFBMaBm1ldGhvZCAKKAIwCTgCFBMaCHJlc291cmNlIAsoAjAJOAIUExoMaHR0cF92ZXJzaW9uIAwoAjAJOAIUExoGc3RhdHVzIA0oADAFOAIUExoNcmVzcG9uc2Vfc2l6ZSAOKAAwAzgCFBMaCHJlZmVycmVyIA8oAjAJOAEUExoKdXNlcl9hZ2VudCAQKAIwCTgBFBMaDXVybF9tYXBfZW50cnkgESgCMAk4AhQTGghjb21iaW5lZCASKAIwCTgCFBMaC2FwaV9tY3ljbGVzIBMoADADOAEUExoEaG9zdCAUKAIwCTgBFBMaBGNvc3QgFSgBMAE4ARQTGg90YXNrX3F1ZXVlX25hbWUgFigCMAk4ARQTGgl0YXNrX25hbWUgFygCMAk4ARQTGhN3YXNfbG9hZGluZ19yZXF1ZXN0IBgoADAIOAEUExoMcGVuZGluZ190aW1lIBkoADADOAEUExoNcmVwbGljYV9pbmRleCAaKAAwBTgBQgItMaMBqgEHZGVmYXVsdLIBAi0xpAEUExoIZmluaXNoZWQgGygAMAg4AUIEdHJ1ZaMBqgEHZGVmYXVsdLIBBHRydWWkARQTGgljbG9uZV9rZXkgHCgCMAk4ARQTGgRsaW5lIB0oAjALOANKEmFwcGhvc3RpbmcuTG9nTGluZRQTGgtleGl0X3JlYXNvbiAeKAAwBTgBFBMaFndhc190aHJvdHRsZWRfZm9yX3RpbWUgHygAMAg4ARQTGhp3YXNfdGhyb3R0bGVkX2Zvcl9yZXF1ZXN0cyAgKAAwCDgBFBMaDnRocm90dGxlZF90aW1lICEoADADOAEUExoLc2VydmVyX25hbWUgIigCMAk4ARTCARphcHBob3N0aW5nLkxvZ1NlcnZpY2VFcnJvcg=="))
if _net_proto___parse__python is not None:
_net_proto___parse__python.RegisterType(
_SERIALIZED_DESCRIPTOR.tostring())
@@ -2553,8 +3029,9 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.LogReadRequest'
_SERIALIZED_DESCRIPTOR = array.array('B')
- _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChlhcHBob3N0aW5nLkxvZ1JlYWRSZXF1ZXN0ExoGYXBwX2lkIAEoAjAJOAIUExoKdmVyc2lvbl9pZCACKAIwCTgDFBMaCnN0YXJ0X3RpbWUgAygAMAM4ARQTGghlbmRfdGltZSAEKAAwAzgBFBMaBm9mZnNldCAFKAIwCzgBShRhcHBob3N0aW5nLkxvZ09mZnNldBQTGgpyZXF1ZXN0X2lkIAYoAjAJOAMUExoRbWluaW11bV9sb2dfbGV2ZWwgBygAMAU4ARQTGhJpbmNsdWRlX2luY29tcGxldGUgCCgAMAg4ARQTGgVjb3VudCAJKAAwAzgBFBMaEGluY2x1ZGVfYXBwX2xvZ3MgCigAMAg4ARQTGgxpbmNsdWRlX2hvc3QgCygAMAg4ARQTGgtpbmNsdWRlX2FsbCAMKAAwCDgBFBMaDmNhY2hlX2l0ZXJhdG9yIA0oADAIOAEUwgEXYXBwaG9zdGluZy5GbHVzaFJlcXVlc3Q="))
+ _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChlhcHBob3N0aW5nLkxvZ1JlYWRSZXF1ZXN0ExoGYXBwX2lkIAEoAjAJOAIUExoKdmVyc2lvbl9pZCACKAIwCTgDFBMaCnN0YXJ0X3RpbWUgAygAMAM4ARQTGghlbmRfdGltZSAEKAAwAzgBFBMaBm9mZnNldCAFKAIwCzgBShRhcHBob3N0aW5nLkxvZ09mZnNldBQTGgpyZXF1ZXN0X2lkIAYoAjAJOAMUExoRbWluaW11bV9sb2dfbGV2ZWwgBygAMAU4ARQTGhJpbmNsdWRlX2luY29tcGxldGUgCCgAMAg4ARQTGgVjb3VudCAJKAAwAzgBFBMaEGluY2x1ZGVfYXBwX2xvZ3MgCigAMAg4ARQTGgxpbmNsdWRlX2hvc3QgCygAMAg4ARQTGgtpbmNsdWRlX2FsbCAMKAAwCDgBFBMaDmNhY2hlX2l0ZXJhdG9yIA0oADAIOAEUwgEaYXBwaG9zdGluZy5Mb2dTZXJ2aWNlRXJyb3I="))
if _net_proto___parse__python is not None:
_net_proto___parse__python.RegisterType(
_SERIALIZED_DESCRIPTOR.tostring())
@@ -2749,8 +3226,9 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.LogReadResponse'
_SERIALIZED_DESCRIPTOR = array.array('B')
- _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChphcHBob3N0aW5nLkxvZ1JlYWRSZXNwb25zZRMaA2xvZyABKAIwCzgDShVhcHBob3N0aW5nLlJlcXVlc3RMb2cUExoGb2Zmc2V0IAIoAjALOAFKFGFwcGhvc3RpbmcuTG9nT2Zmc2V0FMIBF2FwcGhvc3RpbmcuRmx1c2hSZXF1ZXN0"))
+ _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WithcHBob3N0aW5nL2FwaS9sb2dzZXJ2aWNlL2xvZ19zZXJ2aWNlLnByb3RvChphcHBob3N0aW5nLkxvZ1JlYWRSZXNwb25zZRMaA2xvZyABKAIwCzgDShVhcHBob3N0aW5nLlJlcXVlc3RMb2cUExoGb2Zmc2V0IAIoAjALOAFKFGFwcGhvc3RpbmcuTG9nT2Zmc2V0FMIBGmFwcGhvc3RpbmcuTG9nU2VydmljZUVycm9y"))
if _net_proto___parse__python is not None:
_net_proto___parse__python.RegisterType(
_SERIALIZED_DESCRIPTOR.tostring())
@@ -2763,6 +3241,7 @@
__slots__ = (
'_protorpc_Flush', '_full_name_Flush',
'_protorpc_SetStatus', '_full_name_SetStatus',
+ '_protorpc_Read', '_full_name_Read',
)
def __init__(self, rpc_stub):
@@ -2776,6 +3255,10 @@
self._full_name_SetStatus = self._stub.GetFullMethodName(
'SetStatus')
+ self._protorpc_Read = pywraprpc.RPC()
+ self._full_name_Read = self._stub.GetFullMethodName(
+ 'Read')
+
def Flush(self, request, rpc=None, callback=None, response=None):
"""Make a Flush RPC call.
@@ -2826,6 +3309,31 @@
callback,
self._protorpc_SetStatus)
+ def Read(self, request, rpc=None, callback=None, response=None):
+ """Make a Read RPC call.
+
+ Args:
+ request: a LogReadRequest instance.
+ rpc: Optional RPC instance to use for the call.
+ callback: Optional final callback. Will be called as
+ callback(rpc, result) when the rpc completes. If None, the
+ call is synchronous.
+ response: Optional ProtocolMessage to be filled in with response.
+
+ Returns:
+ The LogReadResponse if callback is None. Otherwise, returns None.
+ """
+
+ if response is None:
+ response = LogReadResponse
+ return self._MakeCall(rpc,
+ self._full_name_Read,
+ 'Read',
+ request,
+ response,
+ callback,
+ self._protorpc_Read)
+
class _LogService_ClientStub(_LogService_ClientBaseStub):
__slots__ = ('_params',)
@@ -2913,6 +3421,17 @@
"""
raise NotImplementedError
+
+ def Read(self, rpc, request, response):
+ """Handles a Read RPC call. You should override this.
+
+ Args:
+ rpc: a Stubby RPC object
+ request: a LogReadRequest that contains the client request
+ response: a LogReadResponse that should be modified to send the response
+ """
+ raise NotImplementedError
+
def _AddMethodAttributes(self):
"""Sets attributes on Python RPC handlers.
@@ -2930,6 +3449,12 @@
google.appengine.api.api_base_pb.VoidProto,
None,
'none')
+ rpcserver._GetHandlerDecorator(
+ self.Read.im_func,
+ LogReadRequest,
+ LogReadResponse,
+ None,
+ 'none')
-__all__ = ['FlushRequest','SetStatusRequest','LogOffset','LogLine','RequestLog','LogReadRequest','LogReadResponse','LogService']
+__all__ = ['LogServiceError','UserAppLogLine','UserAppLogGroup','FlushRequest','SetStatusRequest','LogOffset','LogLine','RequestLog','LogReadRequest','LogReadResponse','LogService']
diff --git a/google/appengine/api/logservice/logservice.py b/google/appengine/api/logservice/logservice.py
index 4fa0ff7..37a7cb4 100755
--- a/google/appengine/api/logservice/logservice.py
+++ b/google/appengine/api/logservice/logservice.py
@@ -21,7 +21,8 @@
"""
LogService API.
-This module allows apps to flush logs and provide status messages.
+This module allows apps to flush logs, provide status messages, as well as the
+ability to programmatically access their log files.
"""
@@ -29,21 +30,19 @@
+import cStringIO
import os
import sys
+import threading
import time
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.logservice import log_service_pb
+from google.appengine.api.logservice import logsutil
from google.appengine.runtime import apiproxy_errors
-_flush_time = None
-_log_buffer_lines = None
-_request_id = None
-
-
AUTOFLUSH_ENABLED = True
@@ -55,76 +54,406 @@
AUTOFLUSH_EVERY_LINES = 20
+
+
+MAX_ITEMS_PER_FETCH = 20
+
+
+LOG_LEVEL_DEBUG = 0
+LOG_LEVEL_INFO = 1
+LOG_LEVEL_WARNING = 2
+LOG_LEVEL_ERROR = 3
+LOG_LEVEL_CRITICAL = 4
+
+
+class Error(Exception):
+ """Base error class for this module."""
+
+
+class InvalidArgumentError(Error):
+ """Function argument has invalid value."""
+
+
+class LogsBuffer(object):
+ """Threadsafe buffer for storing and periodically flushing app logs."""
+
+ def __init__(self, stream=None, stderr=False):
+ """Initializes the buffer, which wraps the given stream or sys.stderr.
+
+ The state of the LogsBuffer is protected by a separate lock. The lock is
+ acquired before any variables are mutated or accessed, and released
+ afterward. A recursive lock is used so that a single thread can acquire the
+ lock multiple times, and release it only when an identical number of
+ 'unlock()' calls have been performed.
+
+ Args:
+ stream: A file-like object to store logs. Defaults to a cStringIO object.
+ stderr: If specified, use sys.stderr as the underlying stream.
+ """
+ self._stderr = stderr
+ if self._stderr:
+ assert stream is None
+ else:
+ self._stream = stream or cStringIO.StringIO()
+ self._lock = threading.RLock()
+ self._reset()
+
+ def _lock_and_call(self, method, *args):
+ """Calls 'method' while holding the buffer lock."""
+ self._lock.acquire()
+ try:
+ return method(*args)
+ finally:
+ self._lock.release()
+
+ def stream(self):
+ """Returns the underlying file-like object used to buffer logs."""
+ if self._stderr:
+
+
+ return sys.stderr
+ else:
+ return self._stream
+
+ def lines(self):
+ """Returns the number of log lines currently buffered."""
+ return self._lock_and_call(lambda: self._lines)
+
+ def bytes(self):
+ """Returns the size of the log buffer, in bytes."""
+ return self._lock_and_call(lambda: self._bytes)
+
+ def age(self):
+ """Returns the number of seconds since the log buffer was flushed."""
+ return self._lock_and_call(lambda: time.time() - self._flush_time)
+
+ def flush_time(self):
+ """Returns last time that the log buffer was flushed."""
+ return self._lock_and_call(lambda: self._flush_time)
+
+ def contents(self):
+ """Returns the contents of the logs buffer."""
+ return self._lock_and_call(self._contents)
+
+ def _contents(self):
+ """Internal version of contents() with no locking."""
+ try:
+ return self.stream().getvalue()
+ except AttributeError:
+
+
+ return ''
+
+ def reset(self):
+ """Resets the buffer state, without clearing the underlying stream."""
+ self._lock_and_call(self._reset)
+
+ def _reset(self):
+ """Internal version of reset() with no locking."""
+ contents = self._contents()
+ self._bytes = len(contents)
+ self._lines = len(contents.split('\n')) - 1
+ self._flush_time = time.time()
+ self._request = logsutil.RequestID()
+
+ def clear(self):
+ """Clears the contents of the logs buffer, and resets autoflush state."""
+ self._lock_and_call(self._clear)
+
+ def _clear(self):
+ """Internal version of clear() with no locking."""
+ if self._bytes > 0:
+ self.stream().truncate(0)
+ self._reset()
+
+ def close(self):
+ """Closes the underlying stream, flushing the current contents."""
+ self._lock_and_call(self._close)
+
+ def _close(self):
+ """Internal version of close() with no locking."""
+ self._flush()
+ self.stream().close()
+
+ def parse_logs(self):
+ """Parse the contents of the buffer and return an array of log lines."""
+ return logsutil.ParseLogs(self.contents())
+
+ def write(self, line):
+ """Writes a line to the logs buffer."""
+ return self._lock_and_call(self._write, line)
+
+ def _write(self, line):
+ """Writes a line to the logs buffer."""
+ if self._request != logsutil.RequestID():
+
+
+ self._reset()
+ self.stream().write(line)
+
+
+
+
+ self.stream().flush()
+ self._lines += 1
+ self._bytes += len(line)
+ self._autoflush()
+
+ def flush(self):
+ """Flushes the contents of the logs buffer.
+
+ This method holds the buffer lock until the API call has finished to ensure
+ that flush calls are performed in the correct order, so that log messages
+ written during the flush call aren't dropped or accidentally wiped, and so
+ that the other buffer state variables (flush time, lines, bytes) are updated
+ synchronously with the flush.
+ """
+ self._lock_and_call(self._flush)
+
+ def _flush(self):
+ """Internal version of flush() with no locking."""
+ logs = self.parse_logs()
+ self._clear()
+
+ if len(logs) == 0:
+ return
+
+ request = log_service_pb.FlushRequest()
+ group = log_service_pb.UserAppLogGroup()
+ for entry in logs:
+ line = group.add_log_line()
+ line.set_timestamp_usec(entry[0])
+ line.set_level(entry[1])
+ line.set_message(entry[2])
+ request.set_logs(group.Encode())
+ response = api_base_pb.VoidProto()
+ apiproxy_stub_map.MakeSyncCall('logservice', 'Flush', request, response)
+
+ def autoflush(self):
+ """Flushes the buffer if certain conditions have been met."""
+ self._lock_and_call(self._autoflush)
+
+ def _autoflush(self):
+ """Internal version of autoflush() with no locking."""
+ if not self.autoflush_enabled():
+ return
+
+ if ((AUTOFLUSH_EVERY_SECONDS and self.age() >= AUTOFLUSH_EVERY_SECONDS) or
+ (AUTOFLUSH_EVERY_LINES and self.lines() >= AUTOFLUSH_EVERY_LINES) or
+ (AUTOFLUSH_EVERY_BYTES and self.bytes() >= AUTOFLUSH_EVERY_BYTES)):
+ self._flush()
+
+ def autoflush_enabled(self):
+ """Indicates if the buffer will periodically flush logs during a request."""
+ return AUTOFLUSH_ENABLED and 'BACKEND_ID' in os.environ
+
+
+
+_global_buffer = LogsBuffer(stderr=True)
+
+
+def logs_buffer():
+ """Returns the LogsBuffer used by the current request."""
+
+
+
+
+ return _global_buffer
+
+
+def write(message):
+ """Adds 'message' to the logs buffer, and checks for autoflush.
+
+ Args:
+ message: A message (string) to be written to application logs.
+ """
+ logs_buffer().write(message)
+
+
+def clear():
+ """Clear the logs buffer and reset the autoflush state."""
+ logs_buffer().clear()
+
+
+def autoflush():
+ """If AUTOFLUSH conditions have been met, performs a Flush API call."""
+ logs_buffer().autoflush()
+
+
def flush():
"""Flushes log lines that are currently buffered."""
- request = log_service_pb.FlushRequest()
- response = api_base_pb.VoidProto()
- apiproxy_stub_map.MakeSyncCall('logservice', 'Flush', request, response)
- _reset(True)
+ logs_buffer().flush()
+
def flush_time():
- """Returns last time that the log buffer was flushed."""
- return _flush_time
+ """Returns last time that the logs buffer was flushed."""
+ return logs_buffer().flush_time()
+
+
+def log_buffer_age():
+ """Returns the number of seconds since the logs buffer was flushed."""
+ return logs_buffer().age()
+
def log_buffer_contents():
"""Returns the contents of the logs buffer."""
- try:
- return _log_buffer().getvalue()
- except AttributeError:
+ return logs_buffer().contents()
- return ''
-
def log_buffer_bytes():
- """Returns the size of the log buffer, in bytes."""
- return len(log_buffer_contents())
+ """Returns the size of the logs buffer, in bytes."""
+ return logs_buffer().bytes()
+
def log_buffer_lines():
"""Returns the number of log lines currently buffered."""
- return _log_buffer_lines
-
-def auto_flush(lines_emitted=0):
- """Invoked by app_logging.emit() to automatically flush logs."""
- _check_new_request()
- global _log_buffer_lines, _request_id
- _log_buffer_lines += lines_emitted
-
- if not AUTOFLUSH_ENABLED:
- return
-
- if 'BACKEND_ID' not in os.environ:
- return
-
- log_buffer_age = time.time() - flush_time()
- if AUTOFLUSH_EVERY_SECONDS and log_buffer_age >= AUTOFLUSH_EVERY_SECONDS:
- flush()
- elif AUTOFLUSH_EVERY_LINES and log_buffer_lines() >= AUTOFLUSH_EVERY_LINES:
- flush()
- elif AUTOFLUSH_EVERY_BYTES and log_buffer_bytes() >= AUTOFLUSH_EVERY_BYTES:
- flush()
-
-def _log_buffer():
- """Returns the buffer used for log messages."""
- return sys.stderr
-
-def _check_new_request():
- """Checks if a new request is being processed, and if so, clears state."""
- global _request_id
- current_request = None
- if 'REQUEST_ID_HASH' in os.environ:
- current_request = os.environ['REQUEST_ID_HASH']
- if current_request != _request_id:
- _request_id = current_request
+ return logs_buffer().lines()
- _reset()
+class _LogQueryResult(object):
+ """A container that holds logs and a cursor to fetch additional logs.
-def _reset(truncate=False):
- """Empties the contents of the log buffer and updates the flush time."""
- global _log_buffer_lines, _flush_time
- if truncate and log_buffer_bytes() > 0:
- _log_buffer().truncate(0)
- _log_buffer_lines = 0
- _flush_time = time.time()
+ A _LogQueryResult object is the standard returned item for a call to fetch().
+ It is iterable - each value returned is a log that the user has queried for,
+ and internally, it holds a cursor that it uses to fetch more results once the
+ current, locally held set, are exhausted.
+ """
-_reset()
+ def __init__(self, response):
+ """Constructor.
+
+ Args:
+ response: A LogReadResponse object acquired from a call to fetch().
+ """
+ self._logs = response.log_
+ self._cursor = response.offset_
+ self._current_log = 0
+ self._num_logs = len(self._logs)
+
+ def __iter__(self):
+ """Provides an iterator that yields log records one at a time.
+
+ This iterator yields items held locally first, and once these items have
+ been exhausted, it fetched more items via _advance() and yields them. The
+ number of items it holds is min(MAX_ITEMS_PER_FETCH, batch_size) - the
+ latter value can be provided by the user on an initial call to fetch().
+ """
+ while True:
+ for log_item in self._logs:
+ yield log_item
+ if self._cursor:
+ self._advance()
+ else:
+ break
+
+ def _advance(self):
+ """Acquires additional logs via cursor.
+
+ This method is used by the iterator when it has exhausted its current set of
+ logs to acquire more logs and update its internal structures accordingly.
+ """
+ request = log_service_pb.LogReadRequest()
+ response = log_service_pb.LogReadResponse()
+
+ request.set_app_id(os.environ['APPLICATION_ID'])
+
+ if self._cursor:
+ request.offset_ = self._cursor
+
+ apiproxy_stub_map.MakeSyncCall('logservice', 'Read', request, response)
+ self._logs = response.log_
+ self._cursor = response.offset_
+
+
+def fetch(start_time_usec=None,
+ end_time_usec=None,
+ batch_size=MAX_ITEMS_PER_FETCH,
+ min_log_level=None,
+ include_incomplete=False,
+ include_app_logs=True,
+ version_ids=None):
+ """Fetches an application's request and/or application-level logs.
+
+ Args:
+ start_time_usec: A long corresponding to the earliest time (in microseconds
+ since epoch) that results should be fetched for.
+ end_time_usec: A long corresponding to the latest time (in microseconds
+ since epoch) that results should be fetched for.
+ batch_size: The maximum number of log records that this request should
+ return. A log record corresponds to a web request made to the
+ application. Therefore, it may include a single request log and multiple
+ application level logs (e.g., WARN and INFO messages).
+ min_log_level: The minimum app log level that this request should be
+ returned. This means that querying for a certain log level always returns
+ that log level and all log levels above it. In ascending order, the log
+ levels available are: logs.DEBUG, logs.INFO, logs.WARNING, logs.ERROR,
+ and logs.CRITICAL.
+ include_incomplete: Whether or not to include requests that have started but
+ not yet finished, as a boolean.
+ include_app_logs: Whether or not to include application level logs in the
+ results, as a boolean.
+ version_ids: A list of version ids whose logs should be queried against.
+ Defaults to the application's current version id only.
+
+ Returns:
+ An iterable object containing the logs that the user has queried for.
+
+ Raises:
+ InvalidArgumentError: Raised if any of the input parameters are not of the
+ correct type.
+ """
+
+ request = log_service_pb.LogReadRequest()
+ response = log_service_pb.LogReadResponse()
+
+ request.set_app_id(os.environ['APPLICATION_ID'])
+
+ if start_time_usec:
+ if not isinstance(start_time_usec, long):
+ raise InvalidArgumentError('start_time_usec must be a long')
+ request.set_start_time(start_time_usec)
+
+ if end_time_usec:
+ if not isinstance(end_time_usec, long):
+ raise InvalidArgumentError('end_time_usec must be a long')
+ request.set_end_time(end_time_usec)
+
+ if not isinstance(batch_size, int):
+ raise InvalidArgumentError('batch_size must be an integer')
+
+ if batch_size < 1:
+ raise InvalidArgumentError('batch_size must be greater than zero')
+
+ if batch_size > MAX_ITEMS_PER_FETCH:
+ raise InvalidArgumentError('batch_size specified was too large')
+ request.set_count(batch_size)
+
+ if min_log_level:
+ if not isinstance(min_log_level, int):
+ raise InvalidArgumentError('min_log_level must be an int')
+
+ if not min_log_level in range(LOG_LEVEL_CRITICAL+1):
+ raise InvalidArgumentError("""min_log_level must be between 0 and 4
+ inclusive""")
+ request.set_minimum_log_level(min_log_level)
+
+ if not isinstance(include_incomplete, bool):
+ raise InvalidArgumentError('include_incomplete must be boolean')
+
+ request.set_include_incomplete(include_incomplete)
+
+ if not isinstance(include_app_logs, bool):
+ raise InvalidArgumentError('include_app_logs must be boolean')
+
+ request.set_include_app_logs(include_app_logs)
+
+ if version_ids is None:
+ version_ids = [os.environ['CURRENT_VERSION_ID']]
+
+ if not isinstance(version_ids, list):
+ raise InvalidArgumentError('version_ids must be a list')
+
+ request.version_id_ = version_ids
+
+ apiproxy_stub_map.MakeSyncCall('logservice', 'Read', request, response)
+ return _LogQueryResult(response)
diff --git a/google/appengine/api/logservice/logsutil.py b/google/appengine/api/logservice/logsutil.py
new file mode 100644
index 0000000..1ac10f5
--- /dev/null
+++ b/google/appengine/api/logservice/logsutil.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+
+
+"""Utility methods for working with logs."""
+
+
+import os
+import time
+
+
+
+REQUEST_KEY = 'REQUEST_ID_HASH'
+
+
+def RequestID():
+ """Returns the ID of the current request assigned by App Engine."""
+ return os.environ.get(REQUEST_KEY, None)
+
+
+def ParseLogEntry(entry):
+ """Parses a single log entry emitted by app_logging.AppLogsHandler.
+
+ Parses a log entry of the form LOG <level> <timestamp> <message> where the
+ level is in the range [0, 4]. If the entry is not of that form, take the whole
+ entry to be the message. Null characters in the entry are replaced by
+ newlines.
+
+ Args:
+ entry: The log entry to parse.
+
+ Returns:
+ A (timestamp, level, message) tuple.
+ """
+ split = entry.split(' ', 3)
+ if len(split) == 4 and split[0] == 'LOG':
+ level = split[1]
+ timestamp = split[2]
+ message = split[3]
+ try:
+ message = str(message)
+ timestamp = int(timestamp)
+ level = int(level)
+ except ValueError:
+ pass
+ else:
+ if 0 <= level <= 4:
+ return timestamp, level, message.replace('\0', '\n')
+ usec = int(time.time() * 1e6)
+ return usec, 3, entry.replace('\0', '\n')
+
+
+def ParseLogs(logs):
+ """Parses a str containing newline separated log entries.
+
+ Parses a series of log entries in the form LOG <level> <timestamp> <message>
+ where the level is in the range [0, 4]. Null characters in the entry are
+ replaced by newlines.
+
+ Args:
+ logs: A string containing the log entries.
+
+ Returns:
+ A list of (timestamp, level, message) tuples.
+ """
+ return [ParseLogEntry(line) for line in logs.split('\n') if line]
diff --git a/google/appengine/api/mail_service_pb.py b/google/appengine/api/mail_service_pb.py
index a442f9c..5d1bb8a 100644
--- a/google/appengine/api/mail_service_pb.py
+++ b/google/appengine/api/mail_service_pb.py
@@ -111,6 +111,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MailServiceError'
class MailAttachment(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
@@ -249,6 +250,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MailAttachment'
class MailHeader(ProtocolBuffer.ProtocolMessage):
has_name_ = 0
name_ = ""
@@ -387,6 +389,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MailHeader'
class MailMessage(ProtocolBuffer.ProtocolMessage):
has_sender_ = 0
sender_ = ""
@@ -862,5 +865,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MailMessage'
__all__ = ['MailServiceError','MailAttachment','MailHeader','MailMessage']
diff --git a/google/appengine/api/memcache/__init__.py b/google/appengine/api/memcache/__init__.py
index e0aef9d..3e99b6c 100755
--- a/google/appengine/api/memcache/__init__.py
+++ b/google/appengine/api/memcache/__init__.py
@@ -640,12 +640,7 @@
results = rpc.get_result()
if not results:
return DELETE_NETWORK_FAILURE
- status = results[0]
- if status == MemcacheDeleteResponse.DELETED:
- return DELETE_SUCCESSFUL
- elif status == MemcacheDeleteResponse.NOT_FOUND:
- return DELETE_ITEM_MISSING
- assert False, 'Unexpected deletion status code.'
+ return results[0]
def delete_multi(self, keys, seconds=0, key_prefix='', namespace=None):
"""Delete multiple keys at once.
@@ -678,8 +673,9 @@
Returns:
A UserRPC instance whose get_result() method returns None if
there was a network error, or a list of status values otherwise,
- where each status corresponds to a key and is either DELETED or
- NOT_FOUND.
+ where each status corresponds to a key and is either
+ DELETE_SUCCESSFUL, DELETE_ITEM_MISSING, or DELETE_NETWORK_FAILURE
+ (see delete() docstring for details).
"""
if not isinstance(seconds, (int, long, float)):
raise TypeError('Delete timeout must be a number.')
@@ -704,7 +700,15 @@
rpc.check_success()
except apiproxy_errors.Error:
return None
- return rpc.response.delete_status_list()
+ result = []
+ for status in rpc.response.delete_status_list():
+ if status == MemcacheDeleteResponse.DELETED:
+ result.append(DELETE_SUCCESSFUL)
+ elif status == MemcacheDeleteResponse.NOT_FOUND:
+ result.append(DELETE_ITEM_MISSING)
+ else:
+ result.append(DELETE_NETWORK_FAILURE)
+ return result
diff --git a/google/appengine/api/memcache/memcache_service_pb.py b/google/appengine/api/memcache/memcache_service_pb.py
index 6a9703f..a188d49 100644
--- a/google/appengine/api/memcache/memcache_service_pb.py
+++ b/google/appengine/api/memcache/memcache_service_pb.py
@@ -107,6 +107,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheServiceError'
class AppOverride(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
@@ -245,6 +246,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.AppOverride'
class MemcacheGetRequest(ProtocolBuffer.ProtocolMessage):
has_name_space_ = 0
name_space_ = ""
@@ -466,6 +468,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheGetRequest'
class MemcacheGetResponse_Item(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
@@ -801,6 +804,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheGetResponse'
class MemcacheSetRequest_Item(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
@@ -1305,6 +1309,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheSetRequest'
class MemcacheSetResponse(ProtocolBuffer.ProtocolMessage):
@@ -1425,6 +1430,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheSetResponse'
class MemcacheDeleteRequest_Item(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
@@ -1737,6 +1743,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheDeleteRequest'
class MemcacheDeleteResponse(ProtocolBuffer.ProtocolMessage):
@@ -1853,6 +1860,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheDeleteResponse'
class MemcacheIncrementRequest(ProtocolBuffer.ProtocolMessage):
@@ -2185,6 +2193,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheIncrementRequest'
class MemcacheIncrementResponse(ProtocolBuffer.ProtocolMessage):
@@ -2328,6 +2337,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheIncrementResponse'
class MemcacheBatchIncrementRequest(ProtocolBuffer.ProtocolMessage):
has_name_space_ = 0
name_space_ = ""
@@ -2525,6 +2535,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheBatchIncrementRequest'
class MemcacheBatchIncrementResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -2638,6 +2649,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheBatchIncrementResponse'
class MemcacheFlushRequest(ProtocolBuffer.ProtocolMessage):
has_override_ = 0
override_ = None
@@ -2748,6 +2760,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheFlushRequest'
class MemcacheFlushResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -2812,6 +2825,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheFlushResponse'
class MemcacheStatsRequest(ProtocolBuffer.ProtocolMessage):
has_override_ = 0
override_ = None
@@ -2922,6 +2936,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheStatsRequest'
class MergedNamespaceStats(ProtocolBuffer.ProtocolMessage):
has_hits_ = 0
hits_ = 0
@@ -3214,6 +3229,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MergedNamespaceStats'
class MemcacheStatsResponse(ProtocolBuffer.ProtocolMessage):
has_stats_ = 0
stats_ = None
@@ -3324,6 +3340,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheStatsResponse'
class MemcacheGrabTailRequest(ProtocolBuffer.ProtocolMessage):
has_item_count_ = 0
item_count_ = 0
@@ -3507,6 +3524,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheGrabTailRequest'
class MemcacheGrabTailResponse_Item(ProtocolBuffer.ProtocolMessage):
has_value_ = 0
value_ = ""
@@ -3735,5 +3753,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.MemcacheGrabTailResponse'
__all__ = ['MemcacheServiceError','AppOverride','MemcacheGetRequest','MemcacheGetResponse','MemcacheGetResponse_Item','MemcacheSetRequest','MemcacheSetRequest_Item','MemcacheSetResponse','MemcacheDeleteRequest','MemcacheDeleteRequest_Item','MemcacheDeleteResponse','MemcacheIncrementRequest','MemcacheIncrementResponse','MemcacheBatchIncrementRequest','MemcacheBatchIncrementResponse','MemcacheFlushRequest','MemcacheFlushResponse','MemcacheStatsRequest','MergedNamespaceStats','MemcacheStatsResponse','MemcacheGrabTailRequest','MemcacheGrabTailResponse','MemcacheGrabTailResponse_Item']
diff --git a/google/appengine/api/prospective_search/error_pb.py b/google/appengine/api/prospective_search/error_pb.py
index 7d31c64..20db306 100755
--- a/google/appengine/api/prospective_search/error_pb.py
+++ b/google/appengine/api/prospective_search/error_pb.py
@@ -101,5 +101,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.Error'
__all__ = ['Error']
diff --git a/google/appengine/api/prospective_search/prospective_search_pb.py b/google/appengine/api/prospective_search/prospective_search_pb.py
index 74a8d24..9b92af4 100755
--- a/google/appengine/api/prospective_search/prospective_search_pb.py
+++ b/google/appengine/api/prospective_search/prospective_search_pb.py
@@ -220,6 +220,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.SchemaEntry'
class SubscribeRequest(ProtocolBuffer.ProtocolMessage):
has_topic_ = 0
topic_ = ""
@@ -487,6 +488,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.SubscribeRequest'
class SubscribeResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -551,6 +553,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.SubscribeResponse'
class UnsubscribeRequest(ProtocolBuffer.ProtocolMessage):
has_topic_ = 0
topic_ = ""
@@ -689,6 +692,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.UnsubscribeRequest'
class UnsubscribeResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -753,6 +757,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.UnsubscribeResponse'
class SubscriptionRecord(ProtocolBuffer.ProtocolMessage):
@@ -1016,6 +1021,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.SubscriptionRecord'
class ListSubscriptionsRequest(ProtocolBuffer.ProtocolMessage):
has_topic_ = 0
topic_ = ""
@@ -1251,6 +1257,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.ListSubscriptionsRequest'
class ListSubscriptionsResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -1364,6 +1371,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.ListSubscriptionsResponse'
class ListTopicsRequest(ProtocolBuffer.ProtocolMessage):
has_topic_start_ = 0
topic_start_ = ""
@@ -1526,6 +1534,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.ListTopicsRequest'
class ListTopicsResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -1629,6 +1638,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.ListTopicsResponse'
class MatchRequest(ProtocolBuffer.ProtocolMessage):
@@ -1969,6 +1979,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.MatchRequest'
class MatchResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -2033,5 +2044,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.MatchResponse'
__all__ = ['SchemaEntry','SubscribeRequest','SubscribeResponse','UnsubscribeRequest','UnsubscribeResponse','SubscriptionRecord','ListSubscriptionsRequest','ListSubscriptionsResponse','ListTopicsRequest','ListTopicsResponse','MatchRequest','MatchResponse']
diff --git a/google/appengine/api/search/search.py b/google/appengine/api/search/search.py
index cee69b4..3e65e82 100644
--- a/google/appengine/api/search/search.py
+++ b/google/appengine/api/search/search.py
@@ -327,6 +327,14 @@
return True
+def _GetList(a_list):
+ """Utility function that converts None to the empty list."""
+ if a_list is None:
+ return []
+ else:
+ return list(a_list)
+
+
def _ConvertToList(arg):
"""Converts arg to a list, empty if None, single element if not a list."""
if isinstance(arg, basestring):
@@ -432,9 +440,7 @@
This class should not be directly instantiated.
"""
- _CONSTRUCTOR_KWARGS = frozenset(['name', 'value', 'language'])
-
- def __init__(self, **kwargs):
+ def __init__(self, name, value, language=None):
"""Initializer.
Args:
@@ -443,26 +449,20 @@
reserved pattern '_[A-Z]*' nor start with '!'. Further, field
names cannot contain non-space whitespace characters.
value: The value of the field which can be a str, unicode or date.
- (optional)
language: The ISO 693-1 two letter code of the language used in the value.
- (optional) See
- http://www.sil.org/iso639-3/codes.asp?order=639_1&letter=%25 for a list
- of valid codes. Correct specification of language code will assist in
- correct tokenization of the field. If None is given, then the language
- code of the document will be used.
+ See http://www.sil.org/iso639-3/codes.asp?order=639_1&letter=%25 for a
+ list of valid codes. Correct specification of language code will assist
+ in correct tokenization of the field. If None is given, then the
+ language code of the document will be used.
Raises:
TypeError: If any of the parameters have invalid types, or an unknown
attribute is passed.
ValueError: If any of the parameters have invalid values.
"""
- args_diff = set(kwargs.iterkeys()) - self._CONSTRUCTOR_KWARGS
- if args_diff:
- raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
-
- self._name = _CheckFieldName(kwargs.get('name'))
- self._language = _CheckLanguage(kwargs.get('language'))
- self._value = self._CheckValue(kwargs.get('value'))
+ self._name = _CheckFieldName(name)
+ self._value = self._CheckValue(value)
+ self._language = _CheckLanguage(language)
@property
def name(self):
@@ -513,19 +513,19 @@
TextField(name='signature', value='brzydka pogoda', language='pl')
"""
- def __init__(self, **kwargs):
+ def __init__(self, name, value=None, language=None):
"""Initializer.
Args:
name: The name of the field.
- value: A str or unicode object containing text. (optional)
- language: The code of the language the value is encoded in. (optional)
+ value: A str or unicode object containing text.
+ language: The code of the language the value is encoded in.
Raises:
TypeError: If value is not a string.
ValueError: If value is longer than allowed.
"""
- Field.__init__(self, **kwargs)
+ Field.__init__(self, name, value, language)
def _CheckValue(self, value):
return _CheckText(value)
@@ -542,20 +542,20 @@
HtmlField(name='content', value='<html>herbata, kawa</html>', language='pl')
"""
- def __init__(self, **kwargs):
+ def __init__(self, name, value=None, language=None):
"""Initializer.
Args:
name: The name of the field.
value: A str or unicode object containing the searchable content of the
- Field. (optional)
- language: The code of the language the value is encoded in. (optional)
+ Field.
+ language: The code of the language the value is encoded in.
Raises:
TypeError: If value is not a string.
ValueError: If value is longer than allowed.
"""
- Field.__init__(self, **kwargs)
+ Field.__init__(self, name, value, language)
def _CheckValue(self, value):
return _CheckHtml(value)
@@ -572,20 +572,19 @@
AtomField(name='contributor', value='foo@bar.com')
"""
- def __init__(self, **kwargs):
+ def __init__(self, name, value=None, language=None):
"""Initializer.
Args:
name: The name of the field.
value: A str or unicode object to be treated as an indivisible text value.
- (optional)
- language: The code of the language the value is encoded in. (optional)
+ language: The code of the language the value is encoded in.
Raises:
TypeError: If value is not a string.
ValueError: If value is longer than allowed.
"""
- Field.__init__(self, **kwargs)
+ Field.__init__(self, name, value, language)
def _CheckValue(self, value):
return _CheckAtom(value)
@@ -602,17 +601,17 @@
DateField(name='creation_date', value=datetime.date(2011, 03, 11))
"""
- def __init__(self, **kwargs):
+ def __init__(self, name, value=None):
"""Initializer.
Args:
name: The name of the field.
- value: A datetime.date but not a datetime.datetime. (optional)
+ value: A datetime.date but not a datetime.datetime.
Raises:
TypeError: If value is not a datetime.date or is a datetime.datetime.
"""
- Field.__init__(self, **kwargs)
+ Field.__init__(self, name, value)
def _CheckValue(self, value):
return _CheckDate(value)
@@ -638,20 +637,20 @@
def _NewFieldFromPb(pb):
"""Constructs a Field from a document_pb.Field protocol buffer."""
+ name = pb.name()
value = _GetValue(pb.value())
lang = None
if pb.value().has_language():
lang = pb.value().language()
- args = dict(name=pb.name(), value=value, language=lang)
val_type = pb.value().type()
if val_type == document_pb.FieldValue.TEXT:
- return TextField(**args)
+ return TextField(name, value, lang)
elif val_type == document_pb.FieldValue.HTML:
- return HtmlField(**args)
+ return HtmlField(name, value, lang)
elif val_type == document_pb.FieldValue.ATOM:
- return AtomField(**args)
+ return AtomField(name, value, lang)
elif val_type == document_pb.FieldValue.DATE:
- return DateField(**args)
+ return DateField(name, value)
raise InternalError('Unknown field value type %d', val_type)
@@ -671,42 +670,37 @@
"""
_FIRST_JAN_2011 = datetime.datetime(2011, 1, 1)
-
- DEFAULT_LANGUAGE = 'en'
-
- _CONSTRUCTOR_KWARGS = frozenset(['doc_id', 'fields', 'language', 'order_id'])
-
- def __init__(self, **kwargs):
+ def __init__(self, doc_id=None, fields=None, language='en', order_id=None):
"""Initializer.
Args:
doc_id: The printable ASCII string identifying the document which does
not start with '!' which is reserved. Non-space whitespace characters
- are also excluded from ids.
+ are also excluded from ids. If no id is provided, the search service
+ will provide one.
fields: An iterable of Field instances representing the content of the
- document. (optional)
+ document.
language: The code of the language used in the field values. Defaults
- to DEFAULT_LANGUAGE. (optional)
+ to 'en' (English).
order_id: The id used to specify the order this document will be returned
in search results, where 0 <= order_id <= sys.maxint. Defaults to the
number of seconds since 1st Jan 2011. Documents are returned in
- descending order of the order ID. (optional)
+ descending order of the order ID.
Raises:
TypeError: If any of the parameters have invalid types, or an unknown
attribute is passed.
ValueError: If any of the parameters have invalid values.
"""
- args_diff = set(kwargs.iterkeys()) - self._CONSTRUCTOR_KWARGS
- if args_diff:
- raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
+ if doc_id is not None:
+ _CheckDocumentId(doc_id)
+ self._doc_id = doc_id
+ self._fields = _GetList(fields)
+ self._language = _CheckLanguage(language)
- self._doc_id = _CheckDocumentId(kwargs.get('doc_id'))
- self._fields = list(kwargs.get('fields', []))
- self._language = _CheckLanguage(kwargs.get('language',
- self.DEFAULT_LANGUAGE))
- self._order_id = self._CheckOrderId(
- kwargs.get('order_id', self._GetDefaultOrderId()))
+ if order_id is None:
+ order_id = self._GetDefaultOrderId()
+ self._order_id = self._CheckOrderId(order_id)
@property
def doc_id(self):
@@ -746,7 +740,8 @@
def _CopyDocumentToProtocolBuffer(document, pb):
"""Copies Document to a document_pb.Document protocol buffer."""
pb.set_storage(document_pb.Document.DISK)
- pb.set_id(document.doc_id)
+ if document.doc_id:
+ pb.set_id(document.doc_id)
if document.language:
pb.set_language(document.language)
for field in document.fields:
@@ -786,9 +781,7 @@
_MAXIMUM_EXPRESSION_LENGTH = 1000
_MAXIMUM_OPERATOR_LENGTH = 100
- _CONSTRUCTOR_KWARGS = frozenset(['name', 'expression'])
-
- def __init__(self, **kwargs):
+ def __init__(self, name, expression):
"""Initializer.
Args:
@@ -801,19 +794,14 @@
attribute is passed.
ValueError: If any of the parameters has an invalid value.
"""
- args_diff = set(kwargs.iterkeys()) - self._CONSTRUCTOR_KWARGS
- if args_diff:
- raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
+ self._name = _CheckFieldName(name)
- self._name = _CheckFieldName(kwargs.get('name'))
- self._expression = kwargs.get('expression')
-
- if self._expression is None:
+ if expression is None:
raise ValueError('expression in FieldExpression cannot be null')
- if not isinstance(self._expression, basestring):
+ if not isinstance(expression, basestring):
raise TypeError('expression expected in FieldExpression, but got %s' %
- type(self._expression))
- self._expression = str(self._expression)
+ type(expression))
+ self._expression = str(expression)
@property
def name(self):
@@ -859,10 +847,7 @@
MIN_FIELD_VALUE = ''
- _CONSTRUCTOR_KWARGS = frozenset(['expression', 'sort_descending',
- 'default_value'])
-
- def __init__(self, **kwargs):
+ def __init__(self, expression, sort_descending=True, default_value=None):
"""Initializer.
Args:
@@ -872,23 +857,18 @@
which will add the score from a scorer to a count of the values
of a likes field times 0.1.
sort_descending: Whether to sort in descending or ascending order.
- Defaults to True, descending. (optional)
default_value: The default value of the named field, if none
present for a document. A text value must be specified for text sorts.
- A numeric value must be specified for numeric sorts. (optional)
+ A numeric value must be specified for numeric sorts.
Raises:
TypeError: If any of the parameters has an invalid type, or an unknown
attribute is passed.
ValueError: If any of the parameters has an invalid value.
"""
- args_diff = set(kwargs.iterkeys()) - self._CONSTRUCTOR_KWARGS
- if args_diff:
- raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
-
- self._expression = _CheckExpression(kwargs.get('expression'))
- self._sort_descending = kwargs.get('sort_descending', True)
- self._default_value = kwargs.get('default_value')
+ self._expression = _CheckExpression(expression)
+ self._sort_descending = sort_descending
+ self._default_value = default_value
if isinstance(self.default_value, basestring):
_CheckText(self._default_value, 'default_value')
elif self._default_value is not None:
@@ -939,36 +919,28 @@
"""
GENERIC, MATCH_SCORER = ('GENERIC', 'MATCH_SCORER')
- _DEFAULT_LIMIT = 1000
_MAXIMUM_LIMIT = 10000
_TYPES = frozenset([GENERIC, MATCH_SCORER])
- _CONSTRUCTOR_KWARGS = frozenset(['scorer_type', 'limit'])
-
- def __init__(self, **kwargs):
+ def __init__(self, scorer_type=GENERIC, limit=1000):
"""Initializer.
Args:
scorer_type: The type of scorer to use on search results. Defaults to
- GENERIC. (optional) The possible types include:
+ GENERIC. The possible types include:
GENERIC: A generic scorer that uses match scoring and rescoring.
MATCH_SCORER: A scorer that returns a score based on term frequency
divided by document frequency.
- limit: The limit on the number of documents to score. Defaults to
- _DEFAULT_LIMIT. (optional)
+ limit: The limit on the number of documents to score. Defaults to 1000.
Raises:
TypeError: If any of the parameters have invalid types, or an unknown
attribute is passed.
ValueError: If any of the parameters have invalid values.
"""
- args_diff = set(kwargs.iterkeys()) - self._CONSTRUCTOR_KWARGS
- if args_diff:
- raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
-
- self._scorer_type = self._CheckType(kwargs.get('scorer_type', self.GENERIC))
- self._limit = self._CheckLimit(kwargs.get('limit', self._DEFAULT_LIMIT))
+ self._scorer_type = self._CheckType(scorer_type)
+ self._limit = self._CheckLimit(limit)
@property
def scorer_type(self):
@@ -1026,25 +998,18 @@
snippeted_fields=['content'])
"""
- NONE, SINGLE, PER_RESULT = ('NONE', 'SINGLE', 'PER_RESULT')
+ SINGLE, PER_RESULT = ('SINGLE', 'PER_RESULT')
- DEFAULT_LIMIT = 20
- DEFAULT_MATCHED_COUNT_ACCURACY = 100
-
- _CURSOR_TYPES = frozenset([NONE, SINGLE, PER_RESULT])
+ _CURSOR_TYPES = frozenset([SINGLE, PER_RESULT])
_MAXIMUM_QUERY_LENGTH = 1000
_MAXIMUM_LIMIT = 800
_MAXIMUM_MATCHED_COUNT_ACCURACY = 10000
_MAXIMUM_FIELDS_TO_RETURN = 100
- _CONSTRUCTOR_KWARGS = frozenset(['query', 'offset', 'limit',
- 'matched_count_accuracy',
- 'cursor', 'cursor_type', 'sort_specs',
- 'scorer_spec', 'returned_fields',
- 'snippeted_fields', 'returned_expressions',
- 'app_id'])
-
- def __init__(self, **kwargs):
+ def __init__(self, query, offset=0, limit=20, matched_count_accuracy=100,
+ cursor=None, cursor_type=None, sort_specs=None, scorer_spec=None,
+ returned_fields=None, snippeted_fields=None,
+ returned_expressions=None, **kwargs):
"""Initializer.
Args:
@@ -1062,36 +1027,33 @@
sony brand and a price field which is 300 (inclusive) to 400
(exclusive).
offset: The offset is number of documents to skip in results.
- Defaults to 0. (optional)
limit: The limit on number of documents to return in results.
- Defaults to DEFAULT_LIMIT. (optional)
matched_count_accuracy: The minimum accuracy requirement for
SearchResponse.matched_count. If set, the matched_count will be
accurate up to at least that number. For example, when set to 100,
any SearchResponse with matched_count <= 100 is accurate. This option
may add considerable latency/expense, especially when used with
- returned_fields. Defaults to DEFAULT_MATCHED_COUNT_ACCURACY. (optional)
+ returned_fields.
cursor: A cursor returned in a previous set of search results to use
as a starting point to retrieve the next set of results. This can get
you better performance, and also improves the consistency of pagination
- through index updates. (optional)
- cursor_type: The type of cursor returned results will have. Defaults to
- SearchRequest.NONE. (optional) Possible types are:
- NONE: No cursor will be returned in results.
+ through index updates.
+ cursor_type: The type of cursor returned results will have, if any.
+ Possible types are:
SINGLE: A single cursor will be returned to continue from the end of
the results.
PER_RESULT: One cursor will be returned with each search result, so
you can continue after any result.
sort_specs: An iterable of SortSpecs specifying a multi-dimensional sort
- over the search results. (optional)
- score_spec: The ScorerSpec specifying which scorer to use to score
- documents. (optional)
+ over the search results.
+ scorer_spec: The ScorerSpec specifying which scorer to use to score
+ documents.
returned_fields: An iterable of names of fields to return in search
- results. (optional)
+ results.
snippeted_fields: An iterable of names of fields to snippet and return
- in search result expressions. (optional)
+ in search result expressions.
returned_expressions: An iterable of FieldExpression to evaluate and
- return in search results. (optional)
+ return in search results.
Raises:
TypeError: If any of the parameters have invalid types, or an unknown
@@ -1101,28 +1063,30 @@
- args_diff = set(kwargs.iterkeys()) - self._CONSTRUCTOR_KWARGS
- if args_diff:
- raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
- self._query = self._CheckQuery(kwargs.get('query'))
- self._offset = self._CheckOffset(kwargs.get('offset', 0))
- self._limit = self._CheckLimit(
- kwargs.get('limit', SearchRequest.DEFAULT_LIMIT))
- self._app_id = kwargs.get('app_id')
+ if 'app_id' in kwargs:
+ self._app_id = kwargs.pop('app_id')
+ else:
+ self._app_id = None
+
+ if kwargs:
+ raise TypeError('Invalid arguments: %s' % ', '.join(kwargs))
+
+ self._query = self._CheckQuery(query)
+ self._offset = self._CheckOffset(offset)
+ self._limit = self._CheckLimit(limit)
self._matched_count_accuracy = self._CheckMatchedCountAccuracy(
- kwargs.get('matched_count_accuracy',
- SearchRequest.DEFAULT_MATCHED_COUNT_ACCURACY))
- self._cursor = self._CheckCursor(kwargs.get('cursor'))
- self._cursor_type = self._CheckCursorType(
- kwargs.get('cursor_type', SearchRequest.NONE))
- self._sort_specs = list(kwargs.get('sort_specs', []))
- self._scorer_spec = kwargs.get('scorer_spec')
- self._returned_fields = _CheckFieldNames(
- _ConvertToList(kwargs.get('returned_fields', [])))
- self._snippeted_fields = _CheckFieldNames(
- _ConvertToList(kwargs.get('snippeted_fields', [])))
- self._returned_expressions = _ConvertToList(
- kwargs.get('returned_expressions', []))
+ matched_count_accuracy)
+
+ self._cursor = self._CheckCursor(cursor)
+ self._cursor_type = self._CheckCursorType(cursor_type)
+
+ self._sort_specs = _GetList(sort_specs)
+ self._scorer_spec = scorer_spec
+
+ self._returned_fields = _CheckFieldNames(_ConvertToList(returned_fields))
+ self._snippeted_fields = _CheckFieldNames(_ConvertToList(snippeted_fields))
+ self._returned_expressions = _ConvertToList(returned_expressions)
+
if (len(self._returned_expressions) + len(self._snippeted_fields) +
len(self._returned_fields)) > self._MAXIMUM_FIELDS_TO_RETURN:
raise ValueError(
@@ -1215,7 +1179,9 @@
empty_ok=True)
def _CheckCursorType(self, cursor_type):
- """Checks the cursor_type is one specified in _CURSOR_TYPES."""
+ """Checks the cursor_type is one specified in _CURSOR_TYPES or None."""
+ if cursor_type is None:
+ return None
return _CheckEnum(cursor_type, 'cursor_type',
values=SearchRequest._CURSOR_TYPES)
@@ -1234,7 +1200,7 @@
_CURSOR_TYPE_PB_MAP = {
- SearchRequest.NONE: search_service_pb.SearchParams.NONE,
+ None: search_service_pb.SearchParams.NONE,
SearchRequest.SINGLE: search_service_pb.SearchParams.SINGLE,
SearchRequest.PER_RESULT: search_service_pb.SearchParams.PER_RESULT
}
@@ -1277,11 +1243,8 @@
class SearchResult(object):
"""Represents a result of executing a search request."""
- _CONSTRUCTOR_KWARGS = frozenset(['document', 'sort_scores',
- 'expressions', 'cursor'])
-
- def __init__(self, **kwargs):
+ def __init__(self, document, sort_scores=None, expressions=None, cursor=None):
"""Initializer.
Args:
@@ -1289,24 +1252,20 @@
specified in a SearchRequest will be returned in the document.
sort_scores: The list of scores assigned during sort evaluation. Each
sort dimension is included. Positive scores are used for ascending
- sorts; negative scores for descending. (optional)
+ sorts; negative scores for descending.
expressions: The list of computed fields which are the result of
- expressions requested. (optional)
- cursor: A cursor associated with the document. (optional)
+ expressions requested.
+ cursor: A cursor associated with the document.
Raises:
TypeError: If any of the parameters have invalid types, or an unknown
attribute is passed.
ValueError: If any of the parameters have invalid values.
"""
- args_diff = set(kwargs.iterkeys()) - self._CONSTRUCTOR_KWARGS
- if args_diff:
- raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
- self._document = kwargs.get('document')
- self._sort_scores = list(
- self._CheckSortScores(kwargs.get('sort_scores', [])))
- self._expressions = list(kwargs.get('expressions', []))
- self._cursor = self._CheckCursor(kwargs.get('cursor'))
+ self._document = document
+ self._sort_scores = self._CheckSortScores(_GetList(sort_scores))
+ self._expressions = _GetList(expressions)
+ self._cursor = self._CheckCursor(cursor)
@property
def document(self):
@@ -1368,35 +1327,24 @@
class SearchResponse(object):
"""Represents the result of executing a search request."""
- _CONSTRUCTOR_KWARGS = frozenset(['operation_result', 'results',
- 'matched_count', 'returned_count'])
-
- def __init__(self, **kwargs):
+ def __init__(self, matched_count, results=None, operation_result=None):
"""Initializer.
Args:
operation_result: The OperationResult of the search including error code
and message if any.
results: The list of SearchResult returned from executing a search
- request. (optional)
+ request.
matched_count: The number of documents matched by the query.
- returned_count: The number of documents returned in the
- results list.
Raises:
TypeError: If any of the parameters have an invalid type, or an unknown
attribute is passed.
ValueError: If any of the parameters have an invalid value.
"""
- args_diff = set(kwargs.iterkeys()) - self._CONSTRUCTOR_KWARGS
- if args_diff:
- raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
- self._operation_result = kwargs.get('operation_result')
- self._results = list(kwargs.get('results', []))
- self._matched_count = _CheckInteger(
- kwargs.get('matched_count'), 'matched_count')
- self._returned_count = _CheckInteger(
- kwargs.get('returned_count'), 'returned_count')
+ self._operation_result = operation_result
+ self._matched_count = _CheckInteger(matched_count, 'matched_count')
+ self._results = _GetList(results)
def __iter__(self):
@@ -1426,16 +1374,16 @@
"""
return self._matched_count
+
@property
def returned_count(self):
"""Returns the count of documents returned in results."""
- return self._returned_count
+ return len(self._results)
def __repr__(self):
return _Repr(self, [('operation_result', self.operation_result),
('results', self.results),
- ('matched_count', self.matched_count),
- ('returned_count', self.returned_count)])
+ ('matched_count', self.matched_count)])
class OperationResult(object):
@@ -1446,28 +1394,22 @@
_CODES = frozenset([OK, INVALID_REQUEST, TRANSIENT_ERROR, INTERNAL_ERROR])
- _CONSTRUCTOR_KWARGS = frozenset(['code', 'message'])
-
- def __init__(self, **kwargs):
+ def __init__(self, code, message=None):
"""Initializer.
Args:
code: The error or success code of the operation.
- message: An error message associated with any error. (optional)
+ message: An error message associated with any error.
Raises:
TypeError: If an unknown attribute is passed.
ValueError: If an unknown code is passed.
"""
- args_diff = set(kwargs.iterkeys()) - self._CONSTRUCTOR_KWARGS
- if args_diff:
- raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
-
- self._code = kwargs.get('code')
+ self._code = code
if self._code not in self._CODES:
raise ValueError('Unknown operation result code %r, must be one of %s'
% (self._code, self._CODES))
- self._message = kwargs.get('message')
+ self._message = message
if self._message is not None and not isinstance(self._message, basestring):
raise TypeError('message must be a string: %r' % self._message)
@@ -1563,9 +1505,8 @@
_CONSISTENCY_MODES = [GLOBALLY_CONSISTENT, PER_DOCUMENT_CONSISTENT]
- _CONSTRUCTOR_KWARGS = frozenset(['name', 'namespace', 'consistency'])
-
- def __init__(self, **kwargs):
+ def __init__(self, name, namespace=None,
+ consistency=PER_DOCUMENT_CONSISTENT):
"""Initializer.
Args:
@@ -1575,24 +1516,19 @@
namespace: The namespace of the index name.
consistency: The consistency mode of the index, either GLOBALLY_CONSISTENT
or PER_DOCUMENT_CONSISTENT. Defaults to PER_DOCUMENT_CONSISTENT.
- (optional)
Raises:
TypeError: If an unknown attribute is passed.
ValueError: If an unknown consistency mode, or invalid namespace is given.
"""
- args_diff = set(kwargs.iterkeys()) - self._CONSTRUCTOR_KWARGS
- if args_diff:
- raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
-
- self._name = _CheckIndexName(kwargs.get('name'))
- self._namespace = kwargs.get('namespace')
+ self._name = _CheckIndexName(name)
+ self._namespace = namespace
if self._namespace is None:
self._namespace = namespace_manager.get_namespace()
if self._namespace is None:
self._namespace = ''
namespace_manager.validate_namespace(self._namespace, exception=ValueError)
- self._consistency = kwargs.get('consistency', self.PER_DOCUMENT_CONSISTENT)
+ self._consistency = consistency
if self._consistency not in self._CONSISTENCY_MODES:
raise ValueError('consistency must be one of %s' %
self._CONSISTENCY_MODES)
@@ -1792,8 +1728,7 @@
return SearchResponse(
operation_result=_NewOperationResultFromPb(response.status()),
results=results,
- matched_count=response.matched_count(),
- returned_count=response.result_size())
+ matched_count=response.matched_count())
def list_documents(self, start_doc_id=None, include_start_doc=True,
limit=100, keys_only=False, **kwargs):
diff --git a/google/appengine/api/search/search_service_pb.py b/google/appengine/api/search/search_service_pb.py
index 31d3187..0adc932 100644
--- a/google/appengine/api/search/search_service_pb.py
+++ b/google/appengine/api/search/search_service_pb.py
@@ -33,12 +33,14 @@
INVALID_REQUEST = 1
TRANSIENT_ERROR = 2
INTERNAL_ERROR = 3
+ PERMISSION_DENIED = 4
_ErrorCode_NAMES = {
0: "OK",
1: "INVALID_REQUEST",
2: "TRANSIENT_ERROR",
3: "INTERNAL_ERROR",
+ 4: "PERMISSION_DENIED",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
@@ -107,6 +109,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.SearchServiceError'
class RequestStatus(ProtocolBuffer.ProtocolMessage):
has_status_ = 0
status_ = 0
@@ -240,6 +243,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.RequestStatus'
class IndexSpec(ProtocolBuffer.ProtocolMessage):
@@ -454,6 +458,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.IndexSpec'
class IndexMetadata(ProtocolBuffer.ProtocolMessage):
has_index_spec_ = 0
@@ -610,6 +615,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.IndexMetadata'
class IndexDocumentParams(ProtocolBuffer.ProtocolMessage):
@@ -811,6 +817,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.IndexDocumentParams'
class IndexDocumentRequest(ProtocolBuffer.ProtocolMessage):
has_params_ = 0
has_app_id_ = 0
@@ -948,10 +955,12 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.IndexDocumentRequest'
class IndexDocumentResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.status_ = []
+ self.doc_id_ = []
if contents is not None: self.MergeFromString(contents)
def status_size(self): return len(self.status_)
@@ -970,16 +979,35 @@
def clear_status(self):
self.status_ = []
+ def doc_id_size(self): return len(self.doc_id_)
+ def doc_id_list(self): return self.doc_id_
+
+ def doc_id(self, i):
+ return self.doc_id_[i]
+
+ def set_doc_id(self, i, x):
+ self.doc_id_[i] = x
+
+ def add_doc_id(self, x):
+ self.doc_id_.append(x)
+
+ def clear_doc_id(self):
+ self.doc_id_ = []
+
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.status_size()): self.add_status().CopyFrom(x.status(i))
+ for i in xrange(x.doc_id_size()): self.add_doc_id(x.doc_id(i))
def Equals(self, x):
if x is self: return 1
if len(self.status_) != len(x.status_): return 0
for e1, e2 in zip(self.status_, x.status_):
if e1 != e2: return 0
+ if len(self.doc_id_) != len(x.doc_id_): return 0
+ for e1, e2 in zip(self.doc_id_, x.doc_id_):
+ if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
@@ -992,28 +1020,39 @@
n = 0
n += 1 * len(self.status_)
for i in xrange(len(self.status_)): n += self.lengthString(self.status_[i].ByteSize())
+ n += 1 * len(self.doc_id_)
+ for i in xrange(len(self.doc_id_)): n += self.lengthString(len(self.doc_id_[i]))
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.status_)
for i in xrange(len(self.status_)): n += self.lengthString(self.status_[i].ByteSizePartial())
+ n += 1 * len(self.doc_id_)
+ for i in xrange(len(self.doc_id_)): n += self.lengthString(len(self.doc_id_[i]))
return n
def Clear(self):
self.clear_status()
+ self.clear_doc_id()
def OutputUnchecked(self, out):
for i in xrange(len(self.status_)):
out.putVarInt32(10)
out.putVarInt32(self.status_[i].ByteSize())
self.status_[i].OutputUnchecked(out)
+ for i in xrange(len(self.doc_id_)):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.doc_id_[i])
def OutputPartial(self, out):
for i in xrange(len(self.status_)):
out.putVarInt32(10)
out.putVarInt32(self.status_[i].ByteSizePartial())
self.status_[i].OutputPartial(out)
+ for i in xrange(len(self.doc_id_)):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.doc_id_[i])
def TryMerge(self, d):
while d.avail() > 0:
@@ -1024,6 +1063,9 @@
d.skip(length)
self.add_status().TryMerge(tmp)
continue
+ if tt == 18:
+ self.add_doc_id(d.getPrefixedString())
+ continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -1040,6 +1082,12 @@
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
+ cnt=0
+ for e in self.doc_id_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("doc_id%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
return res
@@ -1047,20 +1095,24 @@
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kstatus = 1
+ kdoc_id = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "status",
- }, 1)
+ 2: "doc_id",
+ }, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
- }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.IndexDocumentResponse'
class DeleteDocumentParams(ProtocolBuffer.ProtocolMessage):
has_index_spec_ = 0
@@ -1207,6 +1259,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.DeleteDocumentParams'
class DeleteDocumentRequest(ProtocolBuffer.ProtocolMessage):
has_params_ = 0
has_app_id_ = 0
@@ -1344,6 +1397,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.DeleteDocumentRequest'
class DeleteDocumentResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -1457,6 +1511,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.DeleteDocumentResponse'
class ListDocumentsParams(ProtocolBuffer.ProtocolMessage):
has_index_spec_ = 0
has_start_doc_id_ = 0
@@ -1696,6 +1751,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ListDocumentsParams'
class ListDocumentsRequest(ProtocolBuffer.ProtocolMessage):
has_params_ = 0
has_app_id_ = 0
@@ -1833,6 +1889,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ListDocumentsRequest'
class ListDocumentsResponse(ProtocolBuffer.ProtocolMessage):
has_status_ = 0
@@ -1989,6 +2046,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ListDocumentsResponse'
class ListIndexesParams(ProtocolBuffer.ProtocolMessage):
has_fetch_schema_ = 0
fetch_schema_ = 0
@@ -2287,6 +2345,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ListIndexesParams'
class ListIndexesRequest(ProtocolBuffer.ProtocolMessage):
has_params_ = 0
has_app_id_ = 0
@@ -2424,6 +2483,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ListIndexesRequest'
class ListIndexesResponse(ProtocolBuffer.ProtocolMessage):
has_status_ = 0
@@ -2580,6 +2640,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ListIndexesResponse'
class SortSpec(ProtocolBuffer.ProtocolMessage):
has_sort_expression_ = 0
sort_expression_ = ""
@@ -2781,6 +2842,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.SortSpec'
class ScorerSpec(ProtocolBuffer.ProtocolMessage):
@@ -2956,6 +3018,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ScorerSpec'
class FieldSpec_Expression(ProtocolBuffer.ProtocolMessage):
has_name_ = 0
name_ = ""
@@ -3232,6 +3295,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.FieldSpec'
class SearchParams(ProtocolBuffer.ProtocolMessage):
@@ -3711,6 +3775,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.SearchParams'
class SearchRequest(ProtocolBuffer.ProtocolMessage):
has_params_ = 0
has_app_id_ = 0
@@ -3848,6 +3913,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.SearchRequest'
class SearchResult(ProtocolBuffer.ProtocolMessage):
has_document_ = 0
has_cursor_ = 0
@@ -4079,6 +4145,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.SearchResult'
class SearchResponse(ProtocolBuffer.ProtocolMessage):
has_matched_count_ = 0
matched_count_ = 0
@@ -4308,5 +4375,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.SearchResponse'
__all__ = ['SearchServiceError','RequestStatus','IndexSpec','IndexMetadata','IndexDocumentParams','IndexDocumentRequest','IndexDocumentResponse','DeleteDocumentParams','DeleteDocumentRequest','DeleteDocumentResponse','ListDocumentsParams','ListDocumentsRequest','ListDocumentsResponse','ListIndexesParams','ListIndexesRequest','ListIndexesResponse','SortSpec','ScorerSpec','FieldSpec','FieldSpec_Expression','SearchParams','SearchRequest','SearchResult','SearchResponse']
diff --git a/google/appengine/api/search/simple_search_stub.py b/google/appengine/api/search/simple_search_stub.py
index 444e943..e521373 100644
--- a/google/appengine/api/search/simple_search_stub.py
+++ b/google/appengine/api/search/simple_search_stub.py
@@ -34,6 +34,7 @@
import random
import string
import urllib
+import uuid
from whoosh import analysis
@@ -284,9 +285,8 @@
self._tokenizer = tokenizer
self._inverted_index = {}
- def AddDocument(self, document):
+ def AddDocument(self, doc_id, document):
"""Adds a document into the index."""
- doc_id = document.id()
token_position = 0
for field in document.field_list():
self._AddTokens(doc_id, field.name(), field.value(),
@@ -353,13 +353,17 @@
"""Indexes an iterable DocumentPb.Document."""
for document in documents:
doc_id = document.id()
+ if not doc_id:
+ doc_id = str(uuid.uuid4())
+ document.set_id(doc_id)
+ response.add_doc_id(doc_id)
if doc_id in self._documents:
old_document = self._documents[doc_id]
self._inverted_index.RemoveDocument(old_document)
self._documents[doc_id] = document
new_status = response.add_status()
new_status.set_status(search_service_pb.SearchServiceError.OK)
- self._inverted_index.AddDocument(document)
+ self._inverted_index.AddDocument(doc_id, document)
def DeleteDocuments(self, document_ids, response):
"""Deletes documents for the given document_ids."""
@@ -371,6 +375,10 @@
delete_status = response.add_status()
delete_status.set_status(search_service_pb.SearchServiceError.OK)
+ def Documents(self):
+ """Returns the documents in the index."""
+ return self._documents.values()
+
def _DocumentsForPostings(self, postings):
"""Returns the documents for the given postings."""
docs = []
@@ -606,6 +614,44 @@
response.mutable_status().set_status(
search_service_pb.SearchServiceError.OK)
+ def _AddDocument(self, response, document, keys_only):
+ doc = response.add_document()
+ if keys_only:
+ doc.set_id(document.id())
+ else:
+ doc.MergeFrom(document)
+
+ def _Dynamic_ListDocuments(self, request, response):
+ """A local implementation of SearchService.ListDocuments RPC.
+
+ Args:
+ request: A search_service_pb.ListDocumentsRequest.
+ response: An search_service_pb.ListDocumentsResponse.
+ """
+ params = request.params()
+ index = self._GetIndex(params.index_spec(), create=True)
+ if index is None:
+ self._UnknownIndex(response.mutable_status(), params.index_spec())
+ return
+
+ num_docs = 0
+ start = not params.has_start_doc_id()
+ for document in index.Documents():
+ if start:
+ if num_docs < params.limit():
+ self._AddDocument(response, document, params.keys_only())
+ num_docs += 1
+ else:
+ if params.has_start_doc_id():
+ if document.id() == params.start_doc_id():
+ start = True
+ if params.include_start_doc():
+ self._AddDocument(response, document, params.keys_only())
+ num_docs += 1
+
+ response.mutable_status().set_status(
+ search_service_pb.SearchServiceError.OK)
+
def _RandomSearchResponse(self, request, response):
random.seed()
@@ -676,6 +722,21 @@
response.set_matched_count(matched_count)
+ def _DefaultFillSearchResponse(self, params, results, response):
+ """Fills the SearchResponse with the first set of results."""
+ position_range = range(0, min(params.limit(), len(results)))
+ self._FillSearchResponse(results, position_range, params.cursor_type(),
+ response)
+
+ def _FillSearchResponse(self, results, position_range, cursor_type, response):
+ """Fills the SearchResponse with a selection of results."""
+ for i in position_range:
+ result = results[i]
+ search_result = response.add_result()
+ search_result.mutable_document().CopyFrom(result)
+ if cursor_type == search_service_pb.SearchParams.PER_RESULT:
+ search_result.set_cursor(result.id())
+
def _Dynamic_Search(self, request, response):
"""A local implementation of SearchService.Search RPC.
@@ -697,26 +758,33 @@
except IndexConsistencyError, exception:
self._InvalidRequest(response.mutable_status(), exception)
-
-
-
params = request.params()
- docs_to_return = 20
- if params.has_limit():
- docs_to_return = params.limit()
-
results = index.Search(params)
-
response.set_matched_count(len(results))
- count = 0
- for i in xrange(len(results)):
- result = results[i]
- search_result = response.add_result()
- search_result.mutable_document().CopyFrom(result)
- count += 1
- if count >= docs_to_return:
- break
+ offset = 0
+ if params.has_cursor():
+ positions = [i for i in range(len(results)) if results[i].id() ==
+ params.cursor()]
+ if positions:
+ offset = positions[0] + 1
+ elif params.has_offset():
+ offset = params.offset()
+
+
+
+ if offset < len(results):
+ position_range = range(
+ offset,
+ min(offset + params.limit(), len(results)))
+ else:
+ position_range = range(0)
+ self._FillSearchResponse(results, position_range, params.cursor_type(),
+ response)
+ if (params.cursor_type() == search_service_pb.SearchParams.SINGLE and
+ len(position_range)):
+ response.set_cursor(results[position_range[len(position_range) - 1]].id())
+
response.status().set_status(search_service_pb.SearchServiceError.OK)
def __repr__(self):
diff --git a/google/appengine/api/system/system_service_pb.py b/google/appengine/api/system/system_service_pb.py
index 452a48e..96bb3a4 100644
--- a/google/appengine/api/system/system_service_pb.py
+++ b/google/appengine/api/system/system_service_pb.py
@@ -101,6 +101,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.SystemServiceError'
class SystemStat(ProtocolBuffer.ProtocolMessage):
has_current_ = 0
current_ = 0.0
@@ -365,6 +366,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.SystemStat'
class GetSystemStatsRequest(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -429,6 +431,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.GetSystemStatsRequest'
class GetSystemStatsResponse(ProtocolBuffer.ProtocolMessage):
has_cpu_ = 0
cpu_ = None
@@ -588,6 +591,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.GetSystemStatsResponse'
class StartBackgroundRequestRequest(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -652,6 +656,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.StartBackgroundRequestRequest'
class StartBackgroundRequestResponse(ProtocolBuffer.ProtocolMessage):
has_request_id_ = 0
request_id_ = ""
@@ -746,5 +751,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.StartBackgroundRequestResponse'
__all__ = ['SystemServiceError','SystemStat','GetSystemStatsRequest','GetSystemStatsResponse','StartBackgroundRequestRequest','StartBackgroundRequestResponse']
diff --git a/google/appengine/api/taskqueue/taskqueue_service_pb.py b/google/appengine/api/taskqueue/taskqueue_service_pb.py
index b68b17a..72286f2 100755
--- a/google/appengine/api/taskqueue/taskqueue_service_pb.py
+++ b/google/appengine/api/taskqueue/taskqueue_service_pb.py
@@ -156,6 +156,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueServiceError'
class TaskQueueRetryParameters(ProtocolBuffer.ProtocolMessage):
has_retry_limit_ = 0
retry_limit_ = 0
@@ -386,6 +387,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueRetryParameters'
class TaskQueueAcl(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -489,6 +491,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueAcl'
class TaskQueueHttpHeader(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
@@ -627,6 +630,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueHttpHeader'
class TaskQueueMode(ProtocolBuffer.ProtocolMessage):
@@ -704,6 +708,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueMode'
class TaskQueueAddRequest_Header(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
@@ -1594,6 +1599,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueAddRequest'
class TaskQueueAddResponse(ProtocolBuffer.ProtocolMessage):
has_chosen_task_name_ = 0
chosen_task_name_ = ""
@@ -1688,6 +1694,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueAddResponse'
class TaskQueueBulkAddRequest(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -1801,6 +1808,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueBulkAddRequest'
class TaskQueueBulkAddResponse_TaskResult(ProtocolBuffer.ProtocolMessage):
has_result_ = 0
result_ = 0
@@ -2029,6 +2037,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueBulkAddResponse'
class TaskQueueDeleteRequest(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
@@ -2205,6 +2214,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteRequest'
class TaskQueueDeleteResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -2308,6 +2318,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteResponse'
class TaskQueueForceRunRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
@@ -2485,6 +2496,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueForceRunRequest'
class TaskQueueForceRunResponse(ProtocolBuffer.ProtocolMessage):
has_result_ = 0
result_ = 0
@@ -2584,6 +2596,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueForceRunResponse'
class TaskQueueUpdateQueueRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
@@ -3052,6 +3065,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateQueueRequest'
class TaskQueueUpdateQueueResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -3116,6 +3130,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateQueueResponse'
class TaskQueueFetchQueuesRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
@@ -3249,6 +3264,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueuesRequest'
class TaskQueueFetchQueuesResponse_Queue(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
@@ -3842,6 +3858,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueuesResponse'
class TaskQueueFetchQueueStatsRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
@@ -4018,6 +4035,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueueStatsRequest'
class TaskQueueScannerQueueInfo(ProtocolBuffer.ProtocolMessage):
has_executed_last_minute_ = 0
executed_last_minute_ = 0
@@ -4261,6 +4279,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueScannerQueueInfo'
class TaskQueueFetchQueueStatsResponse_QueueStats(ProtocolBuffer.ProtocolMessage):
has_num_tasks_ = 0
num_tasks_ = 0
@@ -4544,6 +4563,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchQueueStatsResponse'
class TaskQueuePauseQueueRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
@@ -4719,6 +4739,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePauseQueueRequest'
class TaskQueuePauseQueueResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -4783,6 +4804,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePauseQueueResponse'
class TaskQueuePurgeQueueRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
@@ -4916,6 +4938,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePurgeQueueRequest'
class TaskQueuePurgeQueueResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -4980,6 +5003,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueuePurgeQueueResponse'
class TaskQueueDeleteQueueRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
@@ -5118,6 +5142,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteQueueRequest'
class TaskQueueDeleteQueueResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -5182,6 +5207,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteQueueResponse'
class TaskQueueDeleteGroupRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
@@ -5281,6 +5307,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteGroupRequest'
class TaskQueueDeleteGroupResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -5345,6 +5372,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueDeleteGroupResponse'
class TaskQueueQueryTasksRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
@@ -5580,6 +5608,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryTasksRequest'
class TaskQueueQueryTasksResponse_TaskHeader(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
@@ -6792,6 +6821,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryTasksResponse'
class TaskQueueFetchTaskRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
@@ -6964,6 +6994,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchTaskRequest'
class TaskQueueFetchTaskResponse(ProtocolBuffer.ProtocolMessage):
has_task_ = 0
@@ -7067,6 +7098,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueFetchTaskResponse'
class TaskQueueUpdateStorageLimitRequest(ProtocolBuffer.ProtocolMessage):
has_app_id_ = 0
app_id_ = ""
@@ -7205,6 +7237,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateStorageLimitRequest'
class TaskQueueUpdateStorageLimitResponse(ProtocolBuffer.ProtocolMessage):
has_new_limit_ = 0
new_limit_ = 0
@@ -7304,6 +7337,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueUpdateStorageLimitResponse'
class TaskQueueQueryAndOwnTasksRequest(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
@@ -7479,6 +7513,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryAndOwnTasksRequest'
class TaskQueueQueryAndOwnTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
has_task_name_ = 0
task_name_ = ""
@@ -7780,6 +7815,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueQueryAndOwnTasksResponse'
class TaskQueueModifyTaskLeaseRequest(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
@@ -7994,6 +8030,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueModifyTaskLeaseRequest'
class TaskQueueModifyTaskLeaseResponse(ProtocolBuffer.ProtocolMessage):
has_updated_eta_usec_ = 0
updated_eta_usec_ = 0
@@ -8093,5 +8130,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.TaskQueueModifyTaskLeaseResponse'
__all__ = ['TaskQueueServiceError','TaskQueueRetryParameters','TaskQueueAcl','TaskQueueHttpHeader','TaskQueueMode','TaskQueueAddRequest','TaskQueueAddRequest_Header','TaskQueueAddRequest_CronTimetable','TaskQueueAddResponse','TaskQueueBulkAddRequest','TaskQueueBulkAddResponse','TaskQueueBulkAddResponse_TaskResult','TaskQueueDeleteRequest','TaskQueueDeleteResponse','TaskQueueForceRunRequest','TaskQueueForceRunResponse','TaskQueueUpdateQueueRequest','TaskQueueUpdateQueueResponse','TaskQueueFetchQueuesRequest','TaskQueueFetchQueuesResponse','TaskQueueFetchQueuesResponse_Queue','TaskQueueFetchQueueStatsRequest','TaskQueueScannerQueueInfo','TaskQueueFetchQueueStatsResponse','TaskQueueFetchQueueStatsResponse_QueueStats','TaskQueuePauseQueueRequest','TaskQueuePauseQueueResponse','TaskQueuePurgeQueueRequest','TaskQueuePurgeQueueResponse','TaskQueueDeleteQueueRequest','TaskQueueDeleteQueueResponse','TaskQueueDeleteGroupRequest','TaskQueueDeleteGroupResponse','TaskQueueQueryTasksRequest','TaskQueueQueryTasksResponse','TaskQueueQueryTasksResponse_TaskHeader','TaskQueueQueryTasksResponse_TaskCronTimetable','TaskQueueQueryTasksResponse_TaskRunLog','TaskQueueQueryTasksResponse_Task','TaskQueueFetchTaskRequest','TaskQueueFetchTaskResponse','TaskQueueUpdateStorageLimitRequest','TaskQueueUpdateStorageLimitResponse','TaskQueueQueryAndOwnTasksRequest','TaskQueueQueryAndOwnTasksResponse','TaskQueueQueryAndOwnTasksResponse_Task','TaskQueueModifyTaskLeaseRequest','TaskQueueModifyTaskLeaseResponse']
diff --git a/google/appengine/api/urlfetch_service_pb.py b/google/appengine/api/urlfetch_service_pb.py
index 193f82f..08cafeb 100644
--- a/google/appengine/api/urlfetch_service_pb.py
+++ b/google/appengine/api/urlfetch_service_pb.py
@@ -111,6 +111,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.URLFetchServiceError'
class URLFetchRequest_Header(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
@@ -577,6 +578,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.URLFetchRequest'
class URLFetchResponse_Header(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
@@ -1121,5 +1123,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.URLFetchResponse'
__all__ = ['URLFetchServiceError','URLFetchRequest','URLFetchRequest_Header','URLFetchResponse','URLFetchResponse_Header']
diff --git a/google/appengine/api/user_service_pb.py b/google/appengine/api/user_service_pb.py
index 5fa920e..4217093 100644
--- a/google/appengine/api/user_service_pb.py
+++ b/google/appengine/api/user_service_pb.py
@@ -109,6 +109,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.UserServiceError'
class CreateLoginURLRequest(ProtocolBuffer.ProtocolMessage):
has_destination_url_ = 0
destination_url_ = ""
@@ -276,6 +277,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CreateLoginURLRequest'
class CreateLoginURLResponse(ProtocolBuffer.ProtocolMessage):
has_login_url_ = 0
login_url_ = ""
@@ -375,6 +377,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CreateLoginURLResponse'
class CreateLogoutURLRequest(ProtocolBuffer.ProtocolMessage):
has_destination_url_ = 0
destination_url_ = ""
@@ -508,6 +511,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CreateLogoutURLRequest'
class CreateLogoutURLResponse(ProtocolBuffer.ProtocolMessage):
has_logout_url_ = 0
logout_url_ = ""
@@ -607,6 +611,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CreateLogoutURLResponse'
class GetOAuthUserRequest(ProtocolBuffer.ProtocolMessage):
has_scope_ = 0
scope_ = ""
@@ -701,6 +706,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.GetOAuthUserRequest'
class GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage):
has_email_ = 0
email_ = ""
@@ -946,6 +952,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.GetOAuthUserResponse'
class CheckOAuthSignatureRequest(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -1010,6 +1017,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CheckOAuthSignatureRequest'
class CheckOAuthSignatureResponse(ProtocolBuffer.ProtocolMessage):
has_oauth_consumer_key_ = 0
oauth_consumer_key_ = ""
@@ -1109,6 +1117,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CheckOAuthSignatureResponse'
class CreateFederatedLoginRequest(ProtocolBuffer.ProtocolMessage):
has_claimed_id_ = 0
claimed_id_ = ""
@@ -1281,6 +1290,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CreateFederatedLoginRequest'
class CreateFederatedLoginResponse(ProtocolBuffer.ProtocolMessage):
has_redirected_url_ = 0
redirected_url_ = ""
@@ -1380,6 +1390,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CreateFederatedLoginResponse'
class CreateFederatedLogoutRequest(ProtocolBuffer.ProtocolMessage):
has_destination_url_ = 0
destination_url_ = ""
@@ -1479,6 +1490,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CreateFederatedLogoutRequest'
class CreateFederatedLogoutResponse(ProtocolBuffer.ProtocolMessage):
has_logout_url_ = 0
logout_url_ = ""
@@ -1578,5 +1590,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CreateFederatedLogoutResponse'
__all__ = ['UserServiceError','CreateLoginURLRequest','CreateLoginURLResponse','CreateLogoutURLRequest','CreateLogoutURLResponse','GetOAuthUserRequest','GetOAuthUserResponse','CheckOAuthSignatureRequest','CheckOAuthSignatureResponse','CreateFederatedLoginRequest','CreateFederatedLoginResponse','CreateFederatedLogoutRequest','CreateFederatedLogoutResponse']
diff --git a/google/appengine/api/xmpp/xmpp_service_pb.py b/google/appengine/api/xmpp/xmpp_service_pb.py
index 1e8eb42..102095e 100644
--- a/google/appengine/api/xmpp/xmpp_service_pb.py
+++ b/google/appengine/api/xmpp/xmpp_service_pb.py
@@ -115,6 +115,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.XmppServiceError'
class PresenceRequest(ProtocolBuffer.ProtocolMessage):
has_jid_ = 0
jid_ = ""
@@ -248,6 +249,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.PresenceRequest'
class PresenceResponse(ProtocolBuffer.ProtocolMessage):
@@ -398,6 +400,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.PresenceResponse'
class XmppMessageRequest(ProtocolBuffer.ProtocolMessage):
has_body_ = 0
body_ = ""
@@ -642,6 +645,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.XmppMessageRequest'
class XmppMessageResponse(ProtocolBuffer.ProtocolMessage):
@@ -760,6 +764,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.XmppMessageResponse'
class XmppSendPresenceRequest(ProtocolBuffer.ProtocolMessage):
has_jid_ = 0
jid_ = ""
@@ -995,6 +1000,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.XmppSendPresenceRequest'
class XmppSendPresenceResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -1059,6 +1065,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.XmppSendPresenceResponse'
class XmppInviteRequest(ProtocolBuffer.ProtocolMessage):
has_jid_ = 0
jid_ = ""
@@ -1192,6 +1199,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.XmppInviteRequest'
class XmppInviteResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -1256,5 +1264,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.XmppInviteResponse'
__all__ = ['XmppServiceError','PresenceRequest','PresenceResponse','XmppMessageRequest','XmppMessageResponse','XmppSendPresenceRequest','XmppSendPresenceResponse','XmppInviteRequest','XmppInviteResponse']
diff --git a/google/appengine/base/capabilities_pb.py b/google/appengine/base/capabilities_pb.py
index 7229402..9483a9c 100644
--- a/google/appengine/base/capabilities_pb.py
+++ b/google/appengine/base/capabilities_pb.py
@@ -187,6 +187,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CapabilityConfigList'
class CapabilityConfig(ProtocolBuffer.ProtocolMessage):
@@ -512,5 +513,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.CapabilityConfig'
__all__ = ['CapabilityConfigList','CapabilityConfig']
diff --git a/google/appengine/datastore/datastore_pb.py b/google/appengine/datastore/datastore_pb.py
index b46d73e..44cd9b7 100644
--- a/google/appengine/datastore/datastore_pb.py
+++ b/google/appengine/datastore/datastore_pb.py
@@ -205,6 +205,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Transaction'
class Query_Filter(ProtocolBuffer.ProtocolMessage):
@@ -1402,6 +1403,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Query'
class CompiledQuery_PrimaryScan(ProtocolBuffer.ProtocolMessage):
has_index_name_ = 0
index_name_ = ""
@@ -2412,6 +2414,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledQuery'
class CompiledCursor_PositionIndexValue(ProtocolBuffer.ProtocolMessage):
has_property_ = 0
property_ = ""
@@ -2891,6 +2894,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledCursor'
class RunCompiledQueryRequest(ProtocolBuffer.ProtocolMessage):
has_app_ = 0
app_ = ""
@@ -3185,6 +3189,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.RunCompiledQueryRequest'
class Cursor(ProtocolBuffer.ProtocolMessage):
has_cursor_ = 0
cursor_ = 0
@@ -3316,6 +3321,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cursor'
class Error(ProtocolBuffer.ProtocolMessage):
@@ -3409,6 +3415,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Error'
class Cost_CommitCost(ProtocolBuffer.ProtocolMessage):
has_requested_entity_puts_ = 0
requested_entity_puts_ = 0
@@ -3765,6 +3772,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cost'
class GetRequest(ProtocolBuffer.ProtocolMessage):
has_transaction_ = 0
transaction_ = None
@@ -3996,6 +4004,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetRequest'
class GetResponse_Entity(ProtocolBuffer.ProtocolMessage):
has_entity_ = 0
entity_ = None
@@ -4201,6 +4210,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetResponse'
class PutRequest(ProtocolBuffer.ProtocolMessage):
has_transaction_ = 0
transaction_ = None
@@ -4519,6 +4529,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutRequest'
class PutResponse(ProtocolBuffer.ProtocolMessage):
has_cost_ = 0
cost_ = None
@@ -4682,6 +4693,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutResponse'
class TouchRequest(ProtocolBuffer.ProtocolMessage):
has_force_ = 0
force_ = 0
@@ -4882,6 +4894,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchRequest'
class TouchResponse(ProtocolBuffer.ProtocolMessage):
has_cost_ = 0
cost_ = None
@@ -4992,6 +5005,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchResponse'
class DeleteRequest(ProtocolBuffer.ProtocolMessage):
has_transaction_ = 0
transaction_ = None
@@ -5257,6 +5271,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteRequest'
class DeleteResponse(ProtocolBuffer.ProtocolMessage):
has_cost_ = 0
cost_ = None
@@ -5367,6 +5382,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteResponse'
class NextRequest(ProtocolBuffer.ProtocolMessage):
has_cursor_ = 0
has_count_ = 0
@@ -5572,6 +5588,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.NextRequest'
class QueryResult(ProtocolBuffer.ProtocolMessage):
has_cursor_ = 0
cursor_ = None
@@ -5938,6 +5955,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.QueryResult'
class AllocateIdsRequest(ProtocolBuffer.ProtocolMessage):
has_model_key_ = 0
has_size_ = 0
@@ -6109,6 +6127,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsRequest'
class AllocateIdsResponse(ProtocolBuffer.ProtocolMessage):
has_start_ = 0
start_ = 0
@@ -6247,6 +6266,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsResponse'
class CompositeIndices(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -6360,6 +6380,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompositeIndices'
class AddActionsRequest(ProtocolBuffer.ProtocolMessage):
has_transaction_ = 0
@@ -6516,6 +6537,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsRequest'
class AddActionsResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -6580,6 +6602,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsResponse'
class BeginTransactionRequest(ProtocolBuffer.ProtocolMessage):
has_app_ = 0
app_ = ""
@@ -6713,6 +6736,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.BeginTransactionRequest'
class CommitResponse(ProtocolBuffer.ProtocolMessage):
has_cost_ = 0
cost_ = None
@@ -6823,5 +6847,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CommitResponse'
__all__ = ['Transaction','Query','Query_Filter','Query_Order','CompiledQuery','CompiledQuery_PrimaryScan','CompiledQuery_MergeJoinScan','CompiledQuery_EntityFilter','CompiledCursor','CompiledCursor_PositionIndexValue','CompiledCursor_Position','RunCompiledQueryRequest','Cursor','Error','Cost','Cost_CommitCost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','TouchRequest','TouchResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','AllocateIdsRequest','AllocateIdsResponse','CompositeIndices','AddActionsRequest','AddActionsResponse','BeginTransactionRequest','CommitResponse']
diff --git a/google/appengine/datastore/datastore_sqlite_stub.py b/google/appengine/datastore/datastore_sqlite_stub.py
index a9b606d..a077b9f 100644
--- a/google/appengine/datastore/datastore_sqlite_stub.py
+++ b/google/appengine/datastore/datastore_sqlite_stub.py
@@ -43,14 +43,11 @@
import itertools
import logging
import threading
+import weakref
from google.appengine.datastore import entity_pb
-from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub
-from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_types
-from google.appengine.api.taskqueue import taskqueue_service_pb
-from google.appengine.datastore import datastore_index
from google.appengine.datastore import datastore_pb
from google.appengine.datastore import datastore_stub_util
from google.appengine.datastore import sortable_pb_encoder
@@ -151,191 +148,22 @@
return ref
-class QueryCursor(datastore_stub_util.BaseCursor):
- """Encapsulates a database cursor and provides methods to fetch results."""
-
- def __init__(self, query, db_cursor):
- """Constructor.
-
- Args:
- query: A Query PB.
- db_cursor: An SQLite cursor returning n+2 columns. The first 2 columns
- must be the path of the entity and the entity itself, while the
- remaining columns must be the sort columns for the query.
- """
- super(QueryCursor, self).__init__(query.app())
- self.__query = query
- self.app = query.app()
- self.__cursor = db_cursor
+class _DedupingEntityIterator(object):
+ def __init__(self, cursor):
+ self.__cursor = cursor
self.__seen = set()
+ def __iter__(self):
+ return self
- self.__position = ('', '')
-
-
- self.__next_result = (None, None)
-
- if (query.has_compiled_cursor() and
- query.compiled_cursor().position_size()):
- self.ResumeFromCompiledCursor(query.compiled_cursor())
-
- if query.has_limit():
- self.limit = query.limit() + query.offset()
- else:
- self.limit = None
-
- def Count(self):
- """Counts results, up to the query's limit.
-
- Note this method does not deduplicate results, so the query it was generated
- from should have the 'distinct' clause applied.
-
- Returns:
- int: Result count.
- """
- count = 0
- while self.limit is None or count < self.limit:
- row = self.__cursor.fetchone()
- if not row:
- break
- count += 1
- return count
-
- def _EncodeCompiledCursor(self, cc):
- """Encodes the current position in the query as a compiled cursor.
-
- Args:
- cc: The compiled cursor to fill out.
- """
- if self.__position[0]:
- position = cc.add_position()
-
- position.set_start_key(self.__position[0])
-
- def _GetResult(self):
- """Returns the next result from the result set, without deduplication.
-
- Returns:
- (path, value): The path and value of the next result.
- """
- if self.__position[1]:
- self.__position = (self.__position[1], None)
-
- if not self.__cursor:
- return None, None
- row = self.__cursor.fetchone()
- if not row:
- self.__cursor = None
- return None, None
- path, data, position_parts = str(row[0]), row[1], row[2:]
- position = ''.join(str(x) for x in position_parts)
- if self.__query.has_end_compiled_cursor() and (
- not self.__query.end_compiled_cursor().position_list() or
- position > self.__query.end_compiled_cursor().position(0).start_key()):
- self.__cursor = None
- return None, None
-
-
- self.__position = (self.__position[0], position)
- return path, data
-
- def _Next(self):
- """Fetches the next unique result from the result set.
-
- Returns:
- A datastore_pb.EntityProto instance.
- """
- if self._HasNext():
- self.__seen.add(self.__next_result[0])
- entity = entity_pb.EntityProto(self.__next_result[1])
- datastore_stub_util.PrepareSpecialPropertiesForLoad(entity)
- self.__next_result = None, None
- return entity
- return None
-
- def _HasNext(self):
- """Prefetches the next result and returns true if successful.
-
- Returns:
- A boolean that indicates if there are more results.
- """
- while self.__cursor and (
- not self.__next_result[0] or self.__next_result[0] in self.__seen):
- self.__next_result = self._GetResult()
-
-
-
- if self.limit is not None and len(self.__seen) >= self.limit:
- return False
-
- if self.__next_result[0]:
- return True
- return False
-
- def Skip(self, count):
- """Skips the specified number of unique results.
-
- Args:
- count: Number of results to skip.
-
- Returns:
- A number indicating how many results where actually skipped.
- """
- for i in xrange(count):
- if not self._Next():
- return i
- return count
-
- def ResumeFromCompiledCursor(self, cc):
- """Resumes a query from a compiled cursor.
-
- Args:
- cc: The compiled cursor to resume from.
- """
-
-
-
- target_position = cc.position(0).start_key()
- if (self.__query.has_end_compiled_cursor() and target_position >=
- self.__query.end_compiled_cursor().position(0).start_key()):
-
- self.__position = (target_position, target_position)
- self.__cursor = None
- return
-
-
- while self.__position[1] <= target_position and self.__cursor:
- self.__next_result = self._GetResult()
-
- def PopulateQueryResult(self, result, count, offset, compile):
- """Populates a QueryResult PB with results from the cursor.
-
- Args:
- result: out: A query_result PB.
- count: The number of results to retrieve.
- offset: The number of results to skip
- """
- limited_offset = min(offset, datastore_stub_util._MAX_QUERY_OFFSET)
- if limited_offset:
- result.set_skipped_results(self.Skip(limited_offset))
-
- if offset == limited_offset:
- if count > datastore_stub_util._MAXIMUM_RESULTS:
- count = datastore_stub_util._MAXIMUM_RESULTS
-
- result_list = result.result_list()
- while len(result_list) < count:
- entity = self._Next()
- if entity is None:
- break
- result_list.append(entity)
-
- result.set_keys_only(self.__query.keys_only())
- result.set_more_results(self._HasNext())
- self.PopulateCursor(result)
- if compile:
- self._EncodeCompiledCursor(result.mutable_compiled_cursor())
-
+ def next(self):
+ row = self.__cursor.next()
+ while str(row[0]) in self.__seen:
+ row = self.__cursor.next()
+ self.__seen.add(str(row[0]))
+ entity = entity_pb.EntityProto(row[1])
+ datastore_stub_util.PrepareSpecialPropertiesForLoad(entity)
+ return entity
def MakeEntityForQuery(query, *path):
"""Make an entity to be returned by a pseudo-kind query.
@@ -440,8 +268,7 @@
for row in c.fetchall():
kinds.append(MakeEntityForQuery(query, self.name, ToUtf8(row[0])))
- cursor = datastore_stub_util.ListCursor(
- query, kinds, datastore_stub_util.CompareEntityPbByKey, set())
+ cursor = datastore_stub_util._ExecuteQuery(kinds, query, [], [])
finally:
self.sqlitestub._ReleaseConnection(conn)
@@ -562,8 +389,7 @@
if property_pb:
properties.append(property_pb)
- cursor = datastore_stub_util.ListCursor(
- query, properties, datastore_stub_util.CompareEntityPbByKey, set())
+ cursor = datastore_stub_util._ExecuteQuery(properties, query, [], [])
finally:
self.sqlitestub._ReleaseConnection(conn)
@@ -620,9 +446,7 @@
ns_id = datastore_types._EMPTY_NAMESPACE_ID
namespace_entities.append(MakeEntityForQuery(query, self.name, ns_id))
- return datastore_stub_util.ListCursor(
- query, namespace_entities, datastore_stub_util.CompareEntityPbByKey,
- set())
+ return datastore_stub_util._ExecuteQuery(namespace_entities, query, [], [])
class DatastoreSqliteStub(datastore_stub_util.BaseDatastore,
@@ -672,7 +496,8 @@
datastore_stub_util.BaseDatastore.__init__(self, require_indexes,
consistency_policy)
apiproxy_stub.APIProxyStub.__init__(self, service_name)
- datastore_stub_util.DatastoreStub.__init__(self, self, app_id, trusted)
+ datastore_stub_util.DatastoreStub.__init__(self, weakref.proxy(self),
+ app_id, trusted)
self.__datastore_file = datastore_file
@@ -696,9 +521,9 @@
self.__query_history = {}
- self._RegisterPseudoKind(KindPseudoKind(self))
- self._RegisterPseudoKind(PropertyPseudoKind(self))
- self._RegisterPseudoKind(NamespacePseudoKind(self))
+ self._RegisterPseudoKind(KindPseudoKind(weakref.proxy(self)))
+ self._RegisterPseudoKind(PropertyPseudoKind(weakref.proxy(self)))
+ self._RegisterPseudoKind(NamespacePseudoKind(weakref.proxy(self)))
try:
self.__Init()
@@ -1090,9 +915,8 @@
orders = [('__path__', datastore_pb.Query_Order.ASCENDING)]
params = []
- query = ('SELECT Entities.__path__, Entities.entity, %s '
+ query = ('SELECT Entities.__path__, Entities.entity '
'FROM "%s!Entities" AS Entities %s %s' % (
- ','.join(x[0] for x in orders),
self._GetTablePrefix(query),
self._CreateFilterString(filters, params),
self.__CreateOrderString(orders)))
@@ -1148,12 +972,11 @@
params = []
format_args = (
- ','.join(x[0] for x in orders[2:]),
prefix,
prefix,
self._CreateFilterString(filters, params),
self.__CreateOrderString(orders))
- query = ('SELECT Entities.__path__, Entities.entity, %s '
+ query = ('SELECT Entities.__path__, Entities.entity '
'FROM "%s!EntitiesByProperty" AS EntitiesByProperty INNER JOIN '
'"%s!Entities" AS Entities USING (__path__) %s %s' % format_args)
return query, params
@@ -1235,12 +1058,11 @@
params = []
format_args = (
- ','.join(x[0] for x in orders),
prefix,
' '.join(joins),
self._CreateFilterString(filters, params),
self.__CreateOrderString(orders))
- query = ('SELECT Entities.__path__, Entities.entity, %s '
+ query = ('SELECT Entities.__path__, Entities.entity '
'FROM "%s!Entities" AS Entities %s %s %s' % format_args)
return query, params
@@ -1377,7 +1199,9 @@
conn = self._GetConnection()
try:
db_cursor = conn.execute(sql_stmt, params)
- cursor = QueryCursor(query, db_cursor)
+ dsquery = datastore_stub_util._MakeQuery(query, filters, orders)
+ cursor = datastore_stub_util.IteratorCursor(
+ query, dsquery, orders, _DedupingEntityIterator(db_cursor))
finally:
self._ReleaseConnection(conn)
return cursor
diff --git a/google/appengine/datastore/datastore_stub_util.py b/google/appengine/datastore/datastore_stub_util.py
index d6d659b..c21acf3 100644
--- a/google/appengine/datastore/datastore_stub_util.py
+++ b/google/appengine/datastore/datastore_stub_util.py
@@ -685,30 +685,44 @@
"""A base query cursor over a list of entities.
Public properties:
- cursor: the integer cursor
- app: the app for which this cursor was created
+ cursor: the integer cursor.
+ app: the app for which this cursor was created.
+ keys_only: whether the query is keys_only.
Class attributes:
- _next_cursor: the next cursor to allocate
- _next_cursor_lock: protects _next_cursor
+ _next_cursor: the next cursor to allocate.
+ _next_cursor_lock: protects _next_cursor.
"""
_next_cursor = 1
_next_cursor_lock = threading.Lock()
- def __init__(self, app):
+ def __init__(self, query, dsquery, orders):
"""Constructor.
Args:
- app: The app this cursor is being created for.
+ query: the query request proto.
+ dsquery: a datastore_query.Query over query.
+ orders: the orders of query as returned by _GuessOrders.
"""
- self.app = app
+
+ self.keys_only = query.keys_only()
+ self.app = query.app()
self.cursor = self._AcquireCursorID()
- def PopulateCursor(self, query_result):
+ self.__order_compare_entities = dsquery._order.cmp_for_filter(
+ dsquery._filter_predicate)
+ self.__order_property_names = set(
+ order.property() for order in orders if order.property() != '__key__')
+
+ def _PopulateResultMetadata(self, query_result, compile, last_result):
+ query_result.set_keys_only(self.keys_only)
if query_result.more_results():
cursor = query_result.mutable_cursor()
cursor.set_app(self.app)
cursor.set_cursor(self.cursor)
+ if compile:
+ self._EncodeCompiledCursor(last_result,
+ query_result.mutable_compiled_cursor())
@classmethod
def _AcquireCursorID(cls):
@@ -721,100 +735,18 @@
cls._next_cursor_lock.release()
return cursor_id
-
-class ListCursor(BaseCursor):
- """A query cursor over a list of entities.
-
- Public properties:
- keys_only: whether the query is keys_only
- """
-
- def __init__(self, query, results, order_compare_entities,
- order_property_names):
- """Constructor.
+ def _IsBeforeCursor(self, entity, cursor):
+ """True if entity is before cursor according to the current order.
Args:
- query: the query request proto
- results: list of datastore_pb.EntityProto
- order_compare_entities: a __cmp__ function for datastore_pb.EntityProto
- that follows sort order as specified by the query
- order_property_names: a set of the names of properties used in
- order_compare_entities.
+ entity: a datastore_pb.EntityProto entity.
+ cursor: a compiled cursor as returned by _DecodeCompiledCursor.
"""
- super(ListCursor, self).__init__(query.app())
-
- self.__order_property_names = order_property_names
- if query.has_compiled_cursor() and query.compiled_cursor().position_list():
- self.__last_result, inclusive = (self._DecodeCompiledCursor(
- query.compiled_cursor()))
- start_cursor_position = ListCursor._GetCursorOffset(
- results, self.__last_result, inclusive, order_compare_entities)
+ x = self.__order_compare_entities(entity, cursor[0])
+ if cursor[1]:
+ return x < 0
else:
- self.__last_result = None
- start_cursor_position = 0
-
- if query.has_end_compiled_cursor():
- if query.end_compiled_cursor().position_list():
- end_cursor_entity, inclusive = self._DecodeCompiledCursor(
- query.end_compiled_cursor())
- end_cursor_position = ListCursor._GetCursorOffset(
- results, end_cursor_entity, inclusive, order_compare_entities)
- else:
- end_cursor_position = 0
- else:
- end_cursor_position = len(results)
-
-
- results = results[start_cursor_position:end_cursor_position]
-
-
- if query.has_limit():
- limit = query.limit()
- if query.offset():
- limit += query.offset()
- if limit >= 0 and limit < len(results):
- results = results[:limit]
-
- self.__results = results
- self.__offset = 0
- self.__count = len(self.__results)
-
-
- self.keys_only = query.keys_only()
-
- @staticmethod
- def _GetCursorOffset(results, cursor_entity, inclusive, compare):
- """Converts a cursor entity into a offset into the result set even if the
- cursor_entity no longer exists.
-
- Args:
- results: the query's results (sequence of datastore_pb.EntityProto)
- cursor_entity: the datastore_pb.EntityProto from the compiled query
- inclusive: boolean that specifies if to offset past the cursor_entity
- compare: a function that takes two datastore_pb.EntityProto and compares
- them.
- Returns:
- the integer offset
- """
- lo = 0
- hi = len(results)
- if inclusive:
-
- while lo < hi:
- mid = (lo + hi) // 2
- if compare(results[mid], cursor_entity) < 0:
- lo = mid + 1
- else:
- hi = mid
- else:
-
- while lo < hi:
- mid = (lo + hi) // 2
- if compare(cursor_entity, results[mid]) < 0:
- hi = mid
- else:
- lo = mid + 1
- return lo
+ return x <= 0
def _DecodeCompiledCursor(self, compiled_cursor):
"""Converts a compiled_cursor into a cursor_entity.
@@ -848,35 +780,214 @@
return (cursor_entity, position.start_inclusive())
- def _EncodeCompiledCursor(self, compiled_cursor):
+ def _EncodeCompiledCursor(self, last_result, compiled_cursor):
"""Converts the current state of the cursor into a compiled_cursor.
Args:
- query: the datastore_pb.Query this cursor is related to
- compiled_cursor: an empty datstore_pb.CompiledCursor
+ last_result: the last result returned by this query.
+      compiled_cursor: an empty datastore_pb.CompiledCursor.
"""
- if self.__last_result is not None:
+ if last_result is not None:
position = compiled_cursor.add_position()
- position.mutable_key().MergeFrom(self.__last_result.key())
- for prop in self.__last_result.property_list():
+ position.mutable_key().MergeFrom(last_result.key())
+ for prop in last_result.property_list():
if prop.name() in self.__order_property_names:
indexvalue = position.add_indexvalue()
indexvalue.set_property(prop.name())
indexvalue.mutable_value().CopyFrom(prop.value())
position.set_start_inclusive(False)
- def Count(self):
- """Counts results, up to the query's limit.
- Note this method does not deduplicate results, so the query it was generated
- from should have the 'distinct' clause applied.
+class IteratorCursor(BaseCursor):
+ """A query cursor over an entity iterator."""
- Returns:
- int: Result count.
+ def __init__(self, query, dsquery, orders, results):
+ """Constructor.
+
+ Args:
+ query: the query request proto
+ dsquery: a datastore_query.Query over query.
+ orders: the orders of query as returned by _GuessOrders.
+ results: iterator over datastore_pb.EntityProto
"""
- return self.__count
+ super(IteratorCursor, self).__init__(query, dsquery, orders)
+
+ self.__last_result = None
+ self.__next_result = None
+ self.__results = results
+ self.__done = False
+
+
+ if query.has_end_compiled_cursor():
+ if query.end_compiled_cursor().position_list():
+ self.__end_cursor = self._DecodeCompiledCursor(
+ query.end_compiled_cursor())
+ else:
+ self.__done = True
+ else:
+ self.__end_cursor = None
+
+ if query.has_compiled_cursor() and query.compiled_cursor().position_list():
+ start_cursor = self._DecodeCompiledCursor(query.compiled_cursor())
+ self.__last_result = start_cursor[0]
+ try:
+ self._Advance()
+ while self._IsBeforeCursor(self.__next_result, start_cursor):
+ self._Advance()
+ except StopIteration:
+ pass
+
+
+ self.__offset = 0
+ self.__limit = None
+ if query.has_limit():
+ limit = query.limit()
+ if query.offset():
+ limit += query.offset()
+ if limit >= 0:
+ self.__limit = limit
+
+ def _Done(self):
+ self.__done = True
+ self.__next_result = None
+ raise StopIteration
+
+ def _Advance(self):
+ """Advance to next result (handles end cursor, ignores limit)."""
+ if self.__done:
+ raise StopIteration
+ try:
+ self.__next_result = self.__results.next()
+ except StopIteration:
+ self._Done()
+ if (self.__end_cursor and
+ not self._IsBeforeCursor(self.__next_result, self.__end_cursor)):
+ self._Done()
+
+ def _GetNext(self):
+ """Ensures next result is fetched."""
+ if self.__limit is not None and self.__offset >= self.__limit:
+ self._Done()
+ if self.__next_result is None:
+ self._Advance()
+
+ def _Next(self):
+ """Returns and consumes next result."""
+ self._GetNext()
+ self.__last_result = self.__next_result
+ self.__next_result = None
+ self.__offset += 1
+ return self.__last_result
+
+ def PopulateQueryResult(self, result, count, offset, compile=False):
+ """Populates a QueryResult with this cursor and the given number of results.
+
+ Args:
+ result: datastore_pb.QueryResult
+ count: integer of how many results to return
+ offset: integer of how many results to skip
+ compile: boolean, whether we are compiling this query
+ """
+ skipped = 0
+ try:
+ limited_offset = min(offset, _MAX_QUERY_OFFSET)
+ while skipped < limited_offset:
+ self._Next()
+ skipped += 1
+
+
+
+
+
+
+
+ if skipped == offset:
+ if count > _MAXIMUM_RESULTS:
+ count = _MAXIMUM_RESULTS
+ while count > 0:
+ result.result_list().append(LoadEntity(self._Next()))
+ count -= 1
+
+ self._GetNext()
+ except StopIteration:
+ pass
+
+ result.set_more_results(not self.__done)
+ result.set_skipped_results(skipped)
+ self._PopulateResultMetadata(result, compile, self.__last_result)
+
+
+class ListCursor(BaseCursor):
+ """A query cursor over a list of entities.
+
+ Public properties:
+ keys_only: whether the query is keys_only
+ """
+
+ def __init__(self, query, dsquery, orders, results):
+ """Constructor.
+
+ Args:
+ query: the query request proto
+ dsquery: a datastore_query.Query over query.
+ orders: the orders of query as returned by _GuessOrders.
+ results: list of datastore_pb.EntityProto
+ """
+ super(ListCursor, self).__init__(query, dsquery, orders)
+
+ if query.has_compiled_cursor() and query.compiled_cursor().position_list():
+ start_cursor = self._DecodeCompiledCursor(query.compiled_cursor())
+ self.__last_result = start_cursor[0]
+ start_cursor_position = self._GetCursorOffset(results, start_cursor)
+ else:
+ self.__last_result = None
+ start_cursor_position = 0
+
+ if query.has_end_compiled_cursor():
+ if query.end_compiled_cursor().position_list():
+ end_cursor = self._DecodeCompiledCursor(query.end_compiled_cursor())
+ end_cursor_position = self._GetCursorOffset(results, end_cursor)
+ else:
+ end_cursor_position = 0
+ else:
+ end_cursor_position = len(results)
+
+
+ results = results[start_cursor_position:end_cursor_position]
+
+
+ if query.has_limit():
+ limit = query.limit()
+ if query.offset():
+ limit += query.offset()
+ if limit >= 0 and limit < len(results):
+ results = results[:limit]
+
+ self.__results = results
+ self.__offset = 0
+ self.__count = len(self.__results)
+
+ def _GetCursorOffset(self, results, cursor):
+ """Converts a cursor into a offset into the result set even if the
+ cursor's entity no longer exists.
+
+ Args:
+ results: the query's results (sequence of datastore_pb.EntityProto)
+ cursor: a compiled cursor as returned by _DecodeCompiledCursor
+ Returns:
+ the integer offset
+ """
+ lo = 0
+ hi = len(results)
+ while lo < hi:
+ mid = (lo + hi) // 2
+ if self._IsBeforeCursor(results[mid], cursor):
+ lo = mid + 1
+ else:
+ hi = mid
+ return lo
def PopulateQueryResult(self, result, count, offset, compile=False):
"""Populates a QueryResult with this cursor and the given number of results.
@@ -912,11 +1023,8 @@
self.__last_result = self.__results[self.__offset - 1]
- result.set_keys_only(self.keys_only)
result.set_more_results(self.__offset < self.__count)
- self.PopulateCursor(result)
- if compile:
- self._EncodeCompiledCursor(result.mutable_compiled_cursor())
+ self._PopulateResultMetadata(result, compile, self.__last_result)
def _SynchronizeTxn(function):
@@ -1078,7 +1186,7 @@
Check(query.has_ancestor(),
'Query must have an ancestor when performed in a transaction.')
self._CheckOrSetSnapshot(query.ancestor())
- return _GetQueryCursor(self._snapshot.values(), query, filters, orders)
+ return _ExecuteQuery(self._snapshot.values(), query, filters, orders)
@_SynchronizeTxn
def Put(self, entity, insert):
@@ -1423,8 +1531,8 @@
raise TypeError('policy should be of type '
'datastore_stub_util.BaseConsistencyPolicy found %r.' %
(policy,))
+ self.Flush()
self._consistency_policy = policy
- self.Clear()
def Clear(self):
"""Discards any pending transactions and resets the meta_data."""
@@ -1464,7 +1572,7 @@
CheckTransaction(request_trusted, request_app, transaction)
txn = self._txn_map.get(transaction.handle())
Check(txn and txn._app == transaction.app(),
- 'Transaction %s not found' % transaction)
+ 'Transaction(<%s>) not found' % str(transaction).replace('\n', ', '))
return txn
def Groom(self):
@@ -1879,7 +1987,7 @@
grouped_keys = collections.defaultdict(list)
for i, key in enumerate(raw_keys):
- CheckReference(calling_app, trusted, key)
+ CheckReference(trusted, calling_app, key)
entity_group = _GetEntityGroup(key)
entity_group_key = datastore_types.ReferenceToKeyValue(entity_group)
grouped_keys[entity_group_key].append((key, i))
@@ -2327,20 +2435,32 @@
return orders
-def _GetQueryCursor(results, query, filters, orders):
- """Get the generate a cursor for the given datastore_pb.Query."""
- orders = _GuessOrders(filters, orders)
+def _MakeQuery(query, filters, orders):
+ """Make a datastore_query.Query for the given datastore_pb.Query.
+ Overrides filters and orders in query with the specified arguments."""
clone = datastore_pb.Query()
clone.CopyFrom(query)
clone.clear_filter()
clone.clear_order()
clone.filter_list().extend(filters)
clone.order_list().extend(orders)
+ return datastore_query.Query._from_pb(clone)
- dsquery = datastore_query.Query._from_pb(clone)
- cursor = ListCursor(query, datastore_query.apply_query(dsquery, results),
- dsquery._order.cmp_for_filter(dsquery._filter_predicate),
- set(order.property() for order in orders
- if order.property() != '__key__'))
- return cursor
+
+def _ExecuteQuery(results, query, filters, orders):
+ """Executes the query on a superset of its results.
+
+ Args:
+ results: superset of results for query.
+ query: a datastore_pb.Query.
+ filters: the filters from query.
+ orders: the orders from query.
+
+ Returns:
+ A ListCursor over the results of applying query to results.
+ """
+ orders = _GuessOrders(filters, orders)
+ dsquery = _MakeQuery(query, filters, orders)
+ return ListCursor(query, dsquery, orders,
+ datastore_query.apply_query(dsquery, results))
diff --git a/google/appengine/datastore/document_pb.py b/google/appengine/datastore/document_pb.py
index 9826e39..56929a8 100644
--- a/google/appengine/datastore/document_pb.py
+++ b/google/appengine/datastore/document_pb.py
@@ -205,6 +205,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.FieldValue'
class Field(ProtocolBuffer.ProtocolMessage):
has_name_ = 0
name_ = ""
@@ -347,6 +348,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Field'
class FieldTypes(ProtocolBuffer.ProtocolMessage):
has_name_ = 0
name_ = ""
@@ -489,6 +491,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.FieldTypes'
class Document(ProtocolBuffer.ProtocolMessage):
@@ -608,29 +611,23 @@
def IsInitialized(self, debug_strs=None):
initialized = 1
- if (not self.has_id_):
- initialized = 0
- if debug_strs is not None:
- debug_strs.append('Required field: id not set.')
for p in self.field_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
- n += self.lengthString(len(self.id_))
+ if (self.has_id_): n += 1 + self.lengthString(len(self.id_))
if (self.has_language_): n += 1 + self.lengthString(len(self.language_))
n += 1 * len(self.field_)
for i in xrange(len(self.field_)): n += self.lengthString(self.field_[i].ByteSize())
if (self.has_order_id_): n += 1 + self.lengthVarInt64(self.order_id_)
if (self.has_storage_): n += 1 + self.lengthVarInt64(self.storage_)
- return n + 1
+ return n
def ByteSizePartial(self):
n = 0
- if (self.has_id_):
- n += 1
- n += self.lengthString(len(self.id_))
+ if (self.has_id_): n += 1 + self.lengthString(len(self.id_))
if (self.has_language_): n += 1 + self.lengthString(len(self.language_))
n += 1 * len(self.field_)
for i in xrange(len(self.field_)): n += self.lengthString(self.field_[i].ByteSizePartial())
@@ -646,8 +643,9 @@
self.clear_storage()
def OutputUnchecked(self, out):
- out.putVarInt32(10)
- out.putPrefixedString(self.id_)
+ if (self.has_id_):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.id_)
if (self.has_language_):
out.putVarInt32(18)
out.putPrefixedString(self.language_)
@@ -754,5 +752,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Document'
__all__ = ['FieldValue','Field','FieldTypes','Document']
diff --git a/google/appengine/datastore/entity_pb.py b/google/appengine/datastore/entity_pb.py
index 8b32943..5c033ef 100644
--- a/google/appengine/datastore/entity_pb.py
+++ b/google/appengine/datastore/entity_pb.py
@@ -1096,6 +1096,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.PropertyValue'
class Property(ProtocolBuffer.ProtocolMessage):
@@ -1386,6 +1387,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Property'
class Path_Element(ProtocolBuffer.ProtocolMessage):
has_type_ = 0
type_ = ""
@@ -1648,6 +1650,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Path'
class Reference(ProtocolBuffer.ProtocolMessage):
has_app_ = 0
app_ = ""
@@ -1824,6 +1827,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Reference'
class User(ProtocolBuffer.ProtocolMessage):
has_email_ = 0
email_ = ""
@@ -2137,6 +2141,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.User'
class EntityProto(ProtocolBuffer.ProtocolMessage):
@@ -2522,6 +2527,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.EntityProto'
class CompositeProperty(ProtocolBuffer.ProtocolMessage):
has_index_id_ = 0
index_id_ = 0
@@ -2664,6 +2670,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.CompositeProperty'
class Index_Property(ProtocolBuffer.ProtocolMessage):
@@ -2981,6 +2988,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.Index'
class CompositeIndex(ProtocolBuffer.ProtocolMessage):
@@ -3218,5 +3226,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'storage_onestore_v3.CompositeIndex'
__all__ = ['PropertyValue','PropertyValue_ReferenceValuePathElement','PropertyValue_PointValue','PropertyValue_UserValue','PropertyValue_ReferenceValue','Property','Path','Path_Element','Reference','User','EntityProto','CompositeProperty','Index','Index_Property','CompositeIndex']
diff --git a/google/appengine/ext/admin/__init__.py b/google/appengine/ext/admin/__init__.py
index 2176759..70694f4 100755
--- a/google/appengine/ext/admin/__init__.py
+++ b/google/appengine/ext/admin/__init__.py
@@ -33,6 +33,7 @@
import csv
import cStringIO
import datetime
+import decimal
import logging
import math
import mimetypes
@@ -75,7 +76,7 @@
from google.appengine.ext import webapp
from google.appengine.ext.admin import datastore_stats_generator
from google.appengine.ext.db import metadata
-from google.appengine.ext.webapp import template
+from google.appengine.ext.webapp import _template
_DEBUG = True
@@ -86,6 +87,48 @@
_EtaDelta = taskqueue_stub._EtaDelta
+
+
+class _AhAdminXsrfToken_(db.Model):
+ """Model class used to persist the XSRF token."""
+
+ XSRF_KEY_NAME = '_XSRF_'
+
+ xsrf_token = db.StringProperty()
+
+
+def get_xsrf_token():
+ """Return the XSRF token.
+
+ This is read from the datastore. If no token is present in the
+ datastore, we create a random token and insert it.
+ """
+ entity = _AhAdminXsrfToken_.get_by_key_name(_AhAdminXsrfToken_.XSRF_KEY_NAME)
+ if not entity:
+ randints = ['%08x' % (random.randrange(-2**31, 2**31-1) & (2**32-1))
+ for i in range(6)]
+ xsrf_token = '_'.join(randints)
+ entity = _AhAdminXsrfToken_(key_name=_AhAdminXsrfToken_.XSRF_KEY_NAME,
+ xsrf_token=xsrf_token)
+ entity.put()
+ return entity.xsrf_token
+
+
+def xsrf_required(method):
+ """Decorator to protect post() handlers against XSRF attacks."""
+ def xsrf_required_decorator(self):
+ expected_token = get_xsrf_token()
+ actual_token = self.request.get('xsrf_token')
+ if actual_token != expected_token:
+ self.response.set_status(403, 'Invalid XSRF token')
+ self.response.out.write('<h1>Invalid XSRF token</h1>\n' +
'<p>Please reload the form page</p>\n' +
+ ' '*512)
+ else:
+ method(self)
+ return xsrf_required_decorator
+
+
def ustr(value):
"""Like str(), but UTF-8-encodes Unicode instead of failing."""
try:
@@ -156,6 +199,7 @@
'xmpp_path': base_path + XMPPPageHandler.PATH,
'inboundmail_path': base_path + InboundMailPageHandler.PATH,
'backends_path': base_path + BackendsPageHandler.PATH,
+ 'xsrf_token': get_xsrf_token(),
}
if HAVE_CRON:
values['cron_path'] = base_path + CronPageHandler.PATH
@@ -163,7 +207,7 @@
values.update(template_values)
directory = os.path.dirname(__file__)
path = os.path.join(directory, os.path.join('templates', template_name))
- self.response.out.write(template.render(path, values, debug=_DEBUG))
+ self.response.out.write(_template.render(path, values, debug=_DEBUG))
def base_path(self):
"""Returns the base path of this admin app, which is chosen by the user.
@@ -234,6 +278,7 @@
PATH = InteractivePageHandler.PATH + '/execute'
+ @xsrf_required
def post(self):
save_stdout = sys.stdout
@@ -466,6 +511,7 @@
values = {'queues': self.helper.get_queues(now)}
self.generate('queues.html', values)
+ @xsrf_required
def post(self):
"""Handle modifying actions and/or redirect to GET page."""
queue_name = self.request.get('queue')
@@ -614,6 +660,7 @@
}
self.generate('tasks.html', values)
+ @xsrf_required
def post(self):
self.parse_arguments()
self.task_name = self.request.get('task')
@@ -698,6 +745,7 @@
}
self.generate('backend.html', values)
+ @xsrf_required
def post(self):
if self.request.get('action:startbackend'):
self.stub.start_backend(self.request.get('backend'))
@@ -864,6 +912,7 @@
urllib.quote_plus(v.encode('utf8')))
for k, v in query.iteritems())
+ @xsrf_required
def post(self):
"""Handle modifying actions and/or redirect to GET page."""
next_param = {}
@@ -1035,6 +1084,68 @@
PATH = '/datastore'
+ _ONE_MILLION = decimal.Decimal(1000000)
+
+ _DOLLARS_PER_WRITE = 1/_ONE_MILLION
+
+ _PENNIES_PER_WRITE = _DOLLARS_PER_WRITE/100
+
+ def _writes_to_pennies(self, writes):
+ return self._PENNIES_PER_WRITE * writes
+
+ def _calculate_writes_for_built_in_indices(self, entity):
+ writes = 0
+ for prop_name in entity.keys():
+ if not prop_name in entity.unindexed_properties():
+
+
+ prop_vals = entity[prop_name]
+ if isinstance(prop_vals, (list)):
+ num_prop_vals = len(prop_vals)
+ else:
+ num_prop_vals = 1
+ writes += 2 * num_prop_vals
+ return writes
+
+ def _calculate_writes_for_composite_index(self, entity, index):
+ composite_index_value_count = 1
+ for prop_name, _ in index.Properties():
+ if not prop_name in entity.keys() or (
+ prop_name in entity.unindexed_properties()):
+ return 0
+ prop_vals = entity[prop_name]
+ if isinstance(prop_vals, (list)):
+ composite_index_value_count = (
+ composite_index_value_count * len(prop_vals))
+
+
+
+
+
+ ancestor_count = 1
+ if index.HasAncestor():
+ key = entity.key().parent()
+ while key != None:
+ ancestor_count = ancestor_count + 1
+ key = key.parent()
+ return composite_index_value_count * ancestor_count
+
+ def _get_write_ops(self, entity):
+
+ writes = 2 + self._calculate_writes_for_built_in_indices(entity)
+
+
+ for index, _ in datastore.GetIndexes():
+ if index.Kind() != entity.kind():
+ continue
+ writes = writes + self._calculate_writes_for_composite_index(
+ entity, index)
+ return writes
+
+ def _get_creation_cost_analysis(self, entity):
+ write_ops = self._get_write_ops(entity)
+ return (write_ops, self._writes_to_pennies(write_ops))
+
def get_kinds(self, namespace):
"""Get sorted list of kind names the datastore knows about.
@@ -1081,6 +1192,7 @@
entities = []
edit_path = self.base_path() + DatastoreEditHandler.PATH
for entity in result_set:
+ write_ops = self._get_write_ops(entity)
attributes = []
for key in keys:
if entity.has_key(key):
@@ -1103,6 +1215,7 @@
'key': ustr(entity.key()),
'key_name': ustr(entity.key().name()),
'key_id': entity.key().id(),
+ 'write_ops' : write_ops,
'shortened_key': str(entity.key())[:8] + '...',
'attributes': attributes,
'edit_uri': edit_path + '?key=' + str(entity.key()) + '&kind=' + urllib.quote(ustr(self.request.get('kind'))) + '&next=' + urllib.quote(ustr(self.request.uri)),
@@ -1114,7 +1227,7 @@
max_pager_links = 8
current_page = start / num
num_pages = int(math.ceil(total * 1.0 / num))
- page_start = max(math.floor(current_page - max_pager_links / 2), 0)
+ page_start = max(int(math.floor(current_page - max_pager_links / 2)), 0)
page_end = min(page_start + max_pager_links, num_pages)
pages = []
@@ -1170,6 +1283,7 @@
PATH = DatastoreQueryHandler.PATH + '/batchedit'
+ @xsrf_required
def post(self):
"""Handle POST."""
kind = self.request.get('kind')
@@ -1303,6 +1417,7 @@
'parent_key_string': parent_key_string,
})
+ @xsrf_required
def post(self):
kind = self.request.get('kind')
@@ -1363,6 +1478,7 @@
'msg': self.request.get('msg', None)}
self.generate('datastore_stats.html', values)
+ @xsrf_required
def post(self):
"""Handle actions and redirect to GET page."""
app_id = self.request.get('app_id', None)
@@ -1862,15 +1978,5 @@
wsgiref.handlers.CGIHandler().run(application)
-
-
-import django
-if django.VERSION[:2] < (0, 97):
- from django.template import defaultfilters
- def safe(text, dummy=None):
- return text
- defaultfilters.register.filter("safe", safe)
-
-
if __name__ == '__main__':
main()
diff --git a/google/appengine/ext/admin/templates/datastore.html b/google/appengine/ext/admin/templates/datastore.html
index 92bc75a..7c0d394 100644
--- a/google/appengine/ext/admin/templates/datastore.html
+++ b/google/appengine/ext/admin/templates/datastore.html
@@ -158,6 +158,7 @@
{% if entities %}
<form action="{{ datastore_batch_edit_path }}" method="post">
+ <input type="hidden" name="xsrf_token" value="{{ xsrf_token }}"/>
<input type="hidden" name="kind" value="{{ kind|escape }}"/>
<input type="hidden" name="numkeys" value="{{ entities|length }}"/>
<input type="hidden" name="next" value="{{ start_base_url }}"/>
@@ -167,6 +168,7 @@
<tr>
<th><input id="allkeys" type="checkbox" onclick="checkAllEntities();"/></th>
<th>Key</th>
+ <th>Write Ops</th>
<th>ID</th>
<th>Key Name</th>
{% for header in headers %}
@@ -178,6 +180,9 @@
<td><input id="key{{ forloop.counter }}" type="checkbox" name="key{{ forloop.counter }}" value="{{ entity.key|escape }}" onclick="updateDeleteButtonAndCheckbox();"/></td>
<td onclick="location.href='{{ entity.edit_uri|escape }}'"><a href="{{ entity.edit_uri|escape }}" title="Edit entity #{{ entity.key|escape }}" onclick="return false">{{ entity.shortened_key|escape }}</a></td>
<td>
+ {{entity.write_ops}}
+ </td>
+ <td>
{% if entity.key_id %}
{{entity.key_id}}
{% endif %}
@@ -239,5 +244,3 @@
//]]>
</script>
{% endblock %}
-
-
diff --git a/google/appengine/ext/admin/templates/datastore_edit.html b/google/appengine/ext/admin/templates/datastore_edit.html
index 0763940..47daaad 100644
--- a/google/appengine/ext/admin/templates/datastore_edit.html
+++ b/google/appengine/ext/admin/templates/datastore_edit.html
@@ -66,7 +66,8 @@
<h3>{% if key %}Edit Entity{% else %}New Entity{% endif %}</h3>
<form action="{{ request.path }}" method="post" onsubmit="return clearHints(this)">
- <div><input type="hidden" name="next" value="{{ next }}"/></div>
+ <input type="hidden" name="xsrf_token" value="{{ xsrf_token }}"/>
+ <input type="hidden" name="next" value="{{ next }}"/>
<table class="form">
{% if key %}
{% if namespace %}
diff --git a/google/appengine/ext/admin/templates/datastore_stats.html b/google/appengine/ext/admin/templates/datastore_stats.html
index 0aa1282..6f0f1e1 100644
--- a/google/appengine/ext/admin/templates/datastore_stats.html
+++ b/google/appengine/ext/admin/templates/datastore_stats.html
@@ -33,15 +33,15 @@
<p>
<form action="{{ datastore_stats_path }}" method="post">
<div id="entities-control">
+ <input type="hidden" name="xsrf_token" value="{{ xsrf_token }}"/>
{% if app_id %}
<input type="hidden" name="app_id" value="{{ app_id }}"/>
{% endif %}
<input type="hidden" name="action:compute_stats" value="1"/>
<input id="action:compute_stats" type="submit"
value="Generate Datastore Stats{% if app_id %} for {{ app_id }}{% endif %}"
- onclick="return confirm('This may take a while depending on the size of your datastore. Contiue?')" />
+ onclick="return confirm('This may take a while depending on the size of your datastore. Continue?')" />
</div>
</form>
{% endblock %}
-
diff --git a/google/appengine/ext/admin/templates/inboundmail.html b/google/appengine/ext/admin/templates/inboundmail.html
index 44cb389..37e192b 100644
--- a/google/appengine/ext/admin/templates/inboundmail.html
+++ b/google/appengine/ext/admin/templates/inboundmail.html
@@ -116,6 +116,7 @@
action="/_ah/mail/" method="post"
onsubmit="sendInboundMailWebhook(); return false">
+ <input type="hidden" name="xsrf_token" value="{{ xsrf_token }}"/>
<input type="hidden" name="payload" id="payload">
<input type="hidden" id="content-type" name="header:Content-Type" value="message/rfc822">
<input type="hidden" id="content-length" name="header:Content-Length">
diff --git a/google/appengine/ext/admin/templates/interactive.html b/google/appengine/ext/admin/templates/interactive.html
index 78667e7..11ee8fe 100644
--- a/google/appengine/ext/admin/templates/interactive.html
+++ b/google/appengine/ext/admin/templates/interactive.html
@@ -53,6 +53,7 @@
{% block body %}
<h3>Interactive Console</h3>
<form action="{{ interactive_execute_path }}" target="output" method="post">
+ <input type="hidden" name="xsrf_token" value="{{ xsrf_token }}"/>
<table id="console">
<tr>
<td>
diff --git a/google/appengine/ext/admin/templates/memcache.html b/google/appengine/ext/admin/templates/memcache.html
index 55f869a..772e629 100644
--- a/google/appengine/ext/admin/templates/memcache.html
+++ b/google/appengine/ext/admin/templates/memcache.html
@@ -25,6 +25,7 @@
<li>Hit ratio: {{ hitratio }}% ({{ stats.hits }} hit{{ stats.hits|pluralize }} and {{ stats.misses }} miss{{ stats.misses|pluralize:"es" }})</li>
<li>Size of cache: {{ stats.items }} item{{ stats.items|pluralize }}, {{ stats.bytes|filesizeformat }}
<form id="flush_form" action="{{ request.path }}" method="post">
+ <input type="hidden" name="xsrf_token" value="{{ xsrf_token }}"/>
<input type="submit" name="action:flush" value="Flush Cache" onclick="return confirm('Are you sure you want to flush all keys from the cache?');"/>
</form>
</li>
@@ -34,6 +35,7 @@
<div id="memcache_search">
<form action="{{ request.path }}" method="post">
+ <input type="hidden" name="xsrf_token" value="{{ xsrf_token }}"/>
<span class="field">
<span class="name">Key:</span>
<span class="value"><input id="key_input" name="key" type="text" size="40" value="{{ key|escape }}"/></span>
@@ -65,6 +67,7 @@
{% if show_valueform %}
<div id="memcache_edit">
<form action="{{ request.path }}" method="post">
+ <input type="hidden" name="xsrf_token" value="{{ xsrf_token }}"/>
<table>
<tr>
<th>Key</th>
diff --git a/google/appengine/ext/admin/templates/queues.html b/google/appengine/ext/admin/templates/queues.html
index 42f0894..1265ec5 100644
--- a/google/appengine/ext/admin/templates/queues.html
+++ b/google/appengine/ext/admin/templates/queues.html
@@ -56,6 +56,7 @@
</td>
<td valign="top">
<form action="{{ queues_path }}" method="post">
+ <input type="hidden" name="xsrf_token" value="{{ xsrf_token }}"/>
<input type="hidden" name="queue" value="{{ queue.name|escape }}"/>
<input type="submit" name="action:purgequeue" value="Purge Queue"
onclick="return confirm('Are you sure you want to purge all ' +
@@ -72,4 +73,3 @@
{% endblock %}
-
diff --git a/google/appengine/ext/admin/templates/tasks.html b/google/appengine/ext/admin/templates/tasks.html
index 45710b1..f7f09e7 100644
--- a/google/appengine/ext/admin/templates/tasks.html
+++ b/google/appengine/ext/admin/templates/tasks.html
@@ -69,6 +69,7 @@
</td>
<td valign="top">
<form id="runform.{{ task.name|escape }}" action="{{ task.url|escape }}" method="{{ task.method|escape }}" onsubmit="(new Webhook('runform.{{ task.name|escape }}')).run(handleTaskResult); return false">
+ <input type="hidden" name="xsrf_token" value="{{ xsrf_token }}"/>
<input type="hidden" name="payload" value="{{ task.body|escape }}">
{% for header in task.headers %}
<input type="hidden" name="header:{{ header.0|escape }}"
@@ -79,6 +80,7 @@
</td>
<td valign="top">
<form id="deleteform.{{ task.name|escape }}" action="{{ tasks_path }}" method="post">
+ <input type="hidden" name="xsrf_token" value="{{ xsrf_token }}"/>
<input type="hidden" name="queue" value="{{ queue|escape }}"/>
<input type="hidden" name="task" value="{{ task.name|escape }}"/>
<input type="hidden" name="start_name" value="{{ this_page.start_name|escape }}"/>
@@ -116,4 +118,3 @@
{% endblock %}
-
diff --git a/google/appengine/ext/admin/templates/xmpp.html b/google/appengine/ext/admin/templates/xmpp.html
index 5067c1e..5cac66a 100644
--- a/google/appengine/ext/admin/templates/xmpp.html
+++ b/google/appengine/ext/admin/templates/xmpp.html
@@ -151,6 +151,7 @@
action="/_ah/xmpp/message/chat/" method="post"
onsubmit="sendXmppWebhook(); return false">
+ <input type="hidden" name="xsrf_token" value="{{ xsrf_token }}"/>
<input type="hidden" name="payload" id="payload">
<input type="hidden" id="content-type" name="header:Content-Type">
diff --git a/google/appengine/ext/appstats/datamodel_pb.py b/google/appengine/ext/appstats/datamodel_pb.py
index 919d963..9bb8935 100644
--- a/google/appengine/ext/appstats/datamodel_pb.py
+++ b/google/appengine/ext/appstats/datamodel_pb.py
@@ -162,6 +162,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.AggregateRpcStatsProto'
class KeyValProto(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
@@ -300,6 +301,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.KeyValProto'
class StackFrameProto(ProtocolBuffer.ProtocolMessage):
has_class_or_file_name_ = 0
class_or_file_name_ = ""
@@ -525,6 +527,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.StackFrameProto'
class IndividualRpcStatsProto(ProtocolBuffer.ProtocolMessage):
has_service_call_name_ = 0
service_call_name_ = ""
@@ -920,6 +923,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.IndividualRpcStatsProto'
class RequestStatProto(ProtocolBuffer.ProtocolMessage):
has_start_timestamp_milliseconds_ = 0
start_timestamp_milliseconds_ = 0
@@ -1523,5 +1527,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.RequestStatProto'
__all__ = ['AggregateRpcStatsProto','KeyValProto','StackFrameProto','IndividualRpcStatsProto','RequestStatProto']
diff --git a/google/appengine/ext/appstats/static/appstats_js.js b/google/appengine/ext/appstats/static/appstats_js.js
index ff2d952..ced20d1 100755
--- a/google/appengine/ext/appstats/static/appstats_js.js
+++ b/google/appengine/ext/appstats/static/appstats_js.js
@@ -3,77 +3,78 @@
typeof a.length=="number"&&typeof a.splice!="undefined"&&typeof a.propertyIsEnumerable!="undefined"&&!a.propertyIsEnumerable("splice"))return"array";if(c=="[object Function]"||typeof a.call!="undefined"&&typeof a.propertyIsEnumerable!="undefined"&&!a.propertyIsEnumerable("call"))return"function"}else return"null";else if(b=="function"&&typeof a.call=="undefined")return"object";return b},ea=function(a){return da(a)=="array"},fa=function(a){var b=da(a);return b=="array"||b=="object"&&typeof a.length==
"number"},p=function(a){return typeof a=="string"},q=function(a){return da(a)=="function"},ga=function(a){a=da(a);return a=="object"||a=="array"||a=="function"},s=function(a){return a[ha]||(a[ha]=++ia)},ha="closure_uid_"+Math.floor(Math.random()*2147483648).toString(36),ia=0,ja=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=Array.prototype.slice.call(arguments);b.unshift.apply(b,c);return a.apply(this,b)}},t=function(a,b){function c(){}c.prototype=b.prototype;
a.c=b.prototype;a.prototype=new c;a.prototype.constructor=a};var ka=function(a){this.stack=Error().stack||"";if(a)this.message=String(a)};t(ka,Error);ka.prototype.name="CustomError";var la=function(a,b){for(var c=1;c<arguments.length;c++)var d=String(arguments[c]).replace(/\$/g,"$$$$"),a=a.replace(/\%s/,d);return a},ma=function(a){return a.replace(/^[\s\xa0]+|[\s\xa0]+$/g,"")},sa=function(a){if(!na.test(a))return a;a.indexOf("&")!=-1&&(a=a.replace(oa,"&"));a.indexOf("<")!=-1&&(a=a.replace(pa,"<"));a.indexOf(">")!=-1&&(a=a.replace(qa,">"));a.indexOf('"')!=-1&&(a=a.replace(ra,"""));return a},oa=/&/g,pa=/</g,qa=/>/g,ra=/\"/g,na=/[&<>\"]/,ua=function(a,b){for(var c=
-0,d=ma(String(a)).split("."),f=ma(String(b)).split("."),g=Math.max(d.length,f.length),h=0;c==0&&h<g;h++){var j=d[h]||"",k=f[h]||"",l=RegExp("(\\d*)(\\D*)","g"),L=RegExp("(\\d*)(\\D*)","g");do{var r=l.exec(j)||["","",""],I=L.exec(k)||["","",""];if(r[0].length==0&&I[0].length==0)break;c=ta(r[1].length==0?0:parseInt(r[1],10),I[1].length==0?0:parseInt(I[1],10))||ta(r[2].length==0,I[2].length==0)||ta(r[2],I[2])}while(c==0)}return c},ta=function(a,b){if(a<b)return-1;else if(a>b)return 1;return 0};var va=function(a,b){b.unshift(a);ka.call(this,la.apply(m,b));b.shift();this.lc=a};t(va,ka);va.prototype.name="AssertionError";var wa=function(a,b,c){if(!a){var d=Array.prototype.slice.call(arguments,2),f="Assertion failed";if(b){f+=": "+b;var g=d}e(new va(""+f,g||[]))}};var u=Array.prototype,xa=u.indexOf?function(a,b,c){wa(a.length!=m);return u.indexOf.call(a,b,c)}:function(a,b,c){c=c==m?0:c<0?Math.max(0,a.length+c):c;if(p(a))return!p(b)||b.length!=1?-1:a.indexOf(b,c);for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},ya=u.forEach?function(a,b,c){wa(a.length!=m);u.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,f=p(a)?a.split(""):a,g=0;g<d;g++)g in f&&b.call(c,f[g],g,a)},za=u.every?function(a,b,c){wa(a.length!=m);return u.every.call(a,b,c)}:function(a,
-b,c){for(var d=a.length,f=p(a)?a.split(""):a,g=0;g<d;g++)if(g in f&&!b.call(c,f[g],g,a))return!1;return!0},Aa=function(a,b){return xa(a,b)>=0},Ba=function(a,b){var c=xa(a,b);c>=0&&(wa(a.length!=m),u.splice.call(a,c,1))},Ca=function(a){return u.concat.apply(u,arguments)},Da=function(a){if(ea(a))return Ca(a);else{for(var b=[],c=0,d=a.length;c<d;c++)b[c]=a[c];return b}},Fa=function(a,b,c,d){wa(a.length!=m);u.splice.apply(a,Ea(arguments,1))},Ea=function(a,b,c){wa(a.length!=m);return arguments.length<=
-2?u.slice.call(a,b):u.slice.call(a,b,c)};var Ga=function(a,b){for(var c in a)b.call(i,a[c],c,a)},Ha=function(a,b,c){b in a&&e(Error('The object already contains the key "'+b+'"'));a[b]=c},Ia=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Ja="constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(","),Ka=function(a,b){for(var c,d,f=1;f<arguments.length;f++){d=arguments[f];for(c in d)a[c]=d[c];for(var g=0;g<Ja.length;g++)c=Ja[g],Object.prototype.hasOwnProperty.call(d,c)&&(a[c]=d[c])}};var La,Ma,Na,Oa,Pa=function(){return o.navigator?o.navigator.userAgent:m};Oa=Na=Ma=La=!1;var Qa;if(Qa=Pa()){var Ra=o.navigator;La=Qa.indexOf("Opera")==0;Ma=!La&&Qa.indexOf("MSIE")!=-1;Na=!La&&Qa.indexOf("WebKit")!=-1;Oa=!La&&!Na&&Ra.product=="Gecko"}var Sa=La,v=Ma,w=Oa,x=Na,Ta=o.navigator,Ua=(Ta&&Ta.platform||"").indexOf("Mac")!=-1,Va;
-a:{var Wa="",Xa;if(Sa&&o.opera)var Ya=o.opera.version,Wa=typeof Ya=="function"?Ya():Ya;else if(w?Xa=/rv\:([^\);]+)(\)|;)/:v?Xa=/MSIE\s+([^\);]+)(\)|;)/:x&&(Xa=/WebKit\/(\S+)/),Xa)var Za=Xa.exec(Pa()),Wa=Za?Za[1]:"";if(v){var $a,ab=o.document;$a=ab?ab.documentMode:i;if($a>parseFloat(Wa)){Va=String($a);break a}}Va=Wa}var bb=Va,cb={},y=function(a){return cb[a]||(cb[a]=ua(bb,a)>=0)},db={},eb=function(){return db[9]||(db[9]=v&&document.documentMode&&document.documentMode>=9)};var fb,gb=!v||eb();!w&&!v||v&&eb()||w&&y("1.9.1");var hb=v&&!y("9");var ib=function(a){return(a=a.className)&&typeof a.split=="function"?a.split(/\s+/):[]},z=function(a,b){var c=ib(a),d=Ea(arguments,1),f;f=c;for(var g=0,h=0;h<d.length;h++)Aa(f,d[h])||(f.push(d[h]),g++);f=g==d.length;a.className=c.join(" ");return f},jb=function(a,b){var c=ib(a),d=Ea(arguments,1),f;f=c;for(var g=0,h=0;h<f.length;h++)Aa(d,f[h])&&(Fa(f,h--,1),g++);f=g==d.length;a.className=c.join(" ");return f};var mb=function(a){return a?new kb(lb(a)):fb||(fb=new kb)},nb=function(a){return p(a)?document.getElementById(a):a},ob=function(a,b,c){c=c||document;a=a&&a!="*"?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(!x||document.compatMode=="CSS1Compat"||y("528"))&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName)if(c=c.getElementsByClassName(b),a){for(var d={},f=0,g=0,h;h=c[g];g++)a==h.nodeName&&(d[f++]=h);d.length=f;return d}else return c;c=c.getElementsByTagName(a||
-"*");if(b){d={};for(g=f=0;h=c[g];g++)a=h.className,typeof a.split=="function"&&Aa(a.split(/\s+/),b)&&(d[f++]=h);d.length=f;return d}else return c},qb=function(a,b){Ga(b,function(b,d){d=="style"?a.style.cssText=b:d=="class"?a.className=b:d=="for"?a.htmlFor=b:d in pb?a.setAttribute(pb[d],b):d.lastIndexOf("aria-",0)==0?a.setAttribute(d,b):a[d]=b})},pb={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",rowspan:"rowSpan",valign:"vAlign",height:"height",width:"width",usemap:"useMap",
-frameborder:"frameBorder",maxlength:"maxLength",type:"type"},sb=function(a,b,c){return rb(document,arguments)},rb=function(a,b){var c=b[0],d=b[1];if(!gb&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',sa(d.name),'"');if(d.type){c.push(' type="',sa(d.type),'"');var f={};Ka(f,d);d=f;delete d.type}c.push(">");c=c.join("")}c=a.createElement(c);if(d)p(d)?c.className=d:ea(d)?z.apply(m,[c].concat(d)):qb(c,d);b.length>2&&tb(a,c,b);return c},tb=function(a,b,c){function d(c){c&&b.appendChild(p(c)?
-a.createTextNode(c):c)}for(var f=2;f<c.length;f++){var g=c[f];if(fa(g)&&!(ga(g)&&g.nodeType>0)){var h;a:{if(g&&typeof g.length=="number")if(ga(g)){h=typeof g.item=="function"||typeof g.item=="string";break a}else if(q(g)){h=typeof g.item=="function";break a}h=!1}ya(h?Da(g):g,d)}else d(g)}},ub=function(a){a&&a.parentNode&&a.parentNode.removeChild(a)},vb=function(a){for(;a&&a.nodeType!=1;)a=a.nextSibling;return a},wb=function(a,b){if(a.contains&&b.nodeType==1)return a==b||a.contains(b);if(typeof a.compareDocumentPosition!=
-"undefined")return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b.parentNode;return b==a},lb=function(a){return a.nodeType==9?a:a.ownerDocument||a.document},xb=function(a,b){if("textContent"in a)a.textContent=b;else if(a.firstChild&&a.firstChild.nodeType==3){for(;a.lastChild!=a.firstChild;)a.removeChild(a.lastChild);a.firstChild.data=b}else{for(var c;c=a.firstChild;)a.removeChild(c);a.appendChild(lb(a).createTextNode(b))}},yb={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},zb={IMG:" ",
-BR:"\n"},Ab=function(a){var b=a.getAttributeNode("tabindex");return b&&b.specified?(a=a.tabIndex,typeof a=="number"&&a>=0&&a<32768):!1},Bb=function(a,b,c){if(!(a.nodeName in yb))if(a.nodeType==3)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in zb)b.push(zb[a.nodeName]);else for(a=a.firstChild;a;)Bb(a,b,c),a=a.nextSibling},kb=function(a){this.H=a||o.document||document};n=kb.prototype;n.Ka=mb;n.a=function(a){return p(a)?this.H.getElementById(a):a};
-n.l=function(a,b,c){return rb(this.H,arguments)};n.createElement=function(a){return this.H.createElement(a)};n.createTextNode=function(a){return this.H.createTextNode(a)};n.appendChild=function(a,b){a.appendChild(b)};n.contains=wb;var Cb=new Function("a","return a");var Db,Eb=!v||eb(),Fb=v&&!y("8");var A=function(){};A.prototype.ab=!1;A.prototype.z=function(){if(!this.ab)this.ab=!0,this.f()};A.prototype.f=function(){this.gc&&Gb.apply(m,this.gc)};var Gb=function(a){for(var b=0,c=arguments.length;b<c;++b){var d=arguments[b];fa(d)?Gb.apply(m,d):d&&typeof d.z=="function"&&d.z()}};var B=function(a,b){this.type=a;this.currentTarget=this.target=b};t(B,A);n=B.prototype;n.f=function(){delete this.type;delete this.target;delete this.currentTarget};n.ba=!1;n.va=!0;n.stopPropagation=function(){this.ba=!0};n.preventDefault=function(){this.va=!1};var C=function(a,b){a&&this.ua(a,b)};t(C,B);var Hb=[1,4,2];n=C.prototype;n.target=m;n.relatedTarget=m;n.offsetX=0;n.offsetY=0;n.clientX=0;n.clientY=0;n.screenX=0;n.screenY=0;n.button=0;n.keyCode=0;n.charCode=0;n.ctrlKey=!1;n.altKey=!1;n.shiftKey=!1;n.metaKey=!1;n.Wb=!1;n.L=m;
-n.ua=function(a,b){var c=this.type=a.type;B.call(this,c);this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(w){var f;a:{try{Cb(d.nodeName);f=!0;break a}catch(g){}f=!1}f||(d=m)}}else if(c=="mouseover")d=a.fromElement;else if(c=="mouseout")d=a.toElement;this.relatedTarget=d;this.offsetX=a.offsetX!==i?a.offsetX:a.layerX;this.offsetY=a.offsetY!==i?a.offsetY:a.layerY;this.clientX=a.clientX!==i?a.clientX:a.pageX;this.clientY=a.clientY!==i?a.clientY:a.pageY;this.screenX=
-a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||(c=="keypress"?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.Wb=Ua?a.metaKey:a.ctrlKey;this.state=a.state;this.L=a;delete this.va;delete this.ba};var Ib=function(a){return Eb?a.L.button==0:a.type=="click"?!0:!!(a.L.button&Hb[0])};
-C.prototype.stopPropagation=function(){C.c.stopPropagation.call(this);this.L.stopPropagation?this.L.stopPropagation():this.L.cancelBubble=!0};C.prototype.preventDefault=function(){C.c.preventDefault.call(this);var a=this.L;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,Fb)try{if(a.ctrlKey||a.keyCode>=112&&a.keyCode<=123)a.keyCode=-1}catch(b){}};C.prototype.f=function(){C.c.f.call(this);this.relatedTarget=this.currentTarget=this.target=this.L=m};var D=function(a,b){this.Cb=b;this.aa=[];a>this.Cb&&e(Error("[goog.structs.SimplePool] Initial cannot be greater than max"));for(var c=0;c<a;c++)this.aa.push(this.M?this.M():{})};t(D,A);D.prototype.M=m;D.prototype.Db=m;D.prototype.getObject=function(){return this.aa.length?this.aa.pop():this.M?this.M():{}};var Kb=function(a,b){a.aa.length<a.Cb?a.aa.push(b):Jb(a,b)},Jb=function(a,b){if(a.Db)a.Db(b);else if(ga(b))if(q(b.z))b.z();else for(var c in b)delete b[c]};
-D.prototype.f=function(){D.c.f.call(this);for(var a=this.aa;a.length;)Jb(this,a.pop());delete this.aa};var Lb,Mb=(Lb="ScriptEngine"in o&&o.ScriptEngine()=="JScript")?o.ScriptEngineMajorVersion()+"."+o.ScriptEngineMinorVersion()+"."+o.ScriptEngineBuildVersion():"0";var Nb=function(){},Ob=0;n=Nb.prototype;n.key=0;n.$=!1;n.Ab=!1;n.ua=function(a,b,c,d,f,g){q(a)?this.yb=!0:a&&a.handleEvent&&q(a.handleEvent)?this.yb=!1:e(Error("Invalid listener argument"));this.fa=a;this.rb=b;this.src=c;this.type=d;this.capture=!!f;this.Fa=g;this.Ab=!1;this.key=++Ob;this.$=!1};n.handleEvent=function(a){return this.yb?this.fa.call(this.Fa||this.src,a):this.fa.handleEvent.call(this.fa,a)};var Pb,Qb,Rb,Sb,Tb,Ub,Vb,Wb,Xb,Yb,Zb;
-(function(){function a(){return{I:0,F:0}}function b(){return[]}function c(){var a=function(b){b=h.call(a.src,a.key,b);if(!b)return b};return a}function d(){return new Nb}function f(){return new C}var g=Lb&&!(ua(Mb,"5.7")>=0),h;Ub=function(a){h=a};if(g){Pb=function(){return j.getObject()};Qb=function(a){Kb(j,a)};Rb=function(){return k.getObject()};Sb=function(a){Kb(k,a)};Tb=function(){return l.getObject()};Vb=function(){Kb(l,c())};Wb=function(){return L.getObject()};Xb=function(a){Kb(L,a)};Yb=function(){return r.getObject()};
-Zb=function(a){Kb(r,a)};var j=new D(0,600);j.M=a;var k=new D(0,600);k.M=b;var l=new D(0,600);l.M=c;var L=new D(0,600);L.M=d;var r=new D(0,600);r.M=f}else Pb=a,Qb=ba,Rb=b,Sb=ba,Tb=c,Vb=ba,Wb=d,Xb=ba,Yb=f,Zb=ba})();var $b={},E={},F={},ac={},G=function(a,b,c,d,f){if(b)if(ea(b)){for(var g=0;g<b.length;g++)G(a,b[g],c,d,f);return m}else{var d=!!d,h=E;b in h||(h[b]=Pb());h=h[b];d in h||(h[d]=Pb(),h.I++);var h=h[d],j=s(a),k;h.F++;if(h[j]){k=h[j];for(g=0;g<k.length;g++)if(h=k[g],h.fa==c&&h.Fa==f){if(h.$)break;return k[g].key}}else k=h[j]=Rb(),h.I++;g=Tb();g.src=a;h=Wb();h.ua(c,g,a,b,d,f);c=h.key;g.key=c;k.push(h);$b[c]=h;F[j]||(F[j]=Rb());F[j].push(h);a.addEventListener?(a==o||!a.qb)&&a.addEventListener(b,g,d):a.attachEvent(b in
-ac?ac[b]:ac[b]="on"+b,g);return c}else e(Error("Invalid event type"))},bc=function(a,b,c,d,f){if(ea(b))for(var g=0;g<b.length;g++)bc(a,b[g],c,d,f);else if(d=!!d,a=cc(a,b,d))for(g=0;g<a.length;g++)if(a[g].fa==c&&a[g].capture==d&&a[g].Fa==f){H(a[g].key);break}},H=function(a){if(!$b[a])return!1;var b=$b[a];if(b.$)return!1;var c=b.src,d=b.type,f=b.rb,g=b.capture;c.removeEventListener?(c==o||!c.qb)&&c.removeEventListener(d,f,g):c.detachEvent&&c.detachEvent(d in ac?ac[d]:ac[d]="on"+d,f);c=s(c);f=E[d][g][c];
-if(F[c]){var h=F[c];Ba(h,b);h.length==0&&delete F[c]}b.$=!0;f.wb=!0;dc(d,g,c,f);delete $b[a];return!0},dc=function(a,b,c,d){if(!d.La&&d.wb){for(var f=0,g=0;f<d.length;f++)if(d[f].$){var h=d[f].rb;h.src=m;Vb(h);Xb(d[f])}else f!=g&&(d[g]=d[f]),g++;d.length=g;d.wb=!1;g==0&&(Sb(d),delete E[a][b][c],E[a][b].I--,E[a][b].I==0&&(Qb(E[a][b]),delete E[a][b],E[a].I--),E[a].I==0&&(Qb(E[a]),delete E[a]))}},ec=function(a){var b,c=0,d=b==m;b=!!b;if(a==m)Ga(F,function(a){for(var f=a.length-1;f>=0;f--){var g=a[f];
-if(d||b==g.capture)H(g.key),c++}});else if(a=s(a),F[a])for(var a=F[a],f=a.length-1;f>=0;f--){var g=a[f];if(d||b==g.capture)H(g.key),c++}},cc=function(a,b,c){var d=E;return b in d&&(d=d[b],c in d&&(d=d[c],a=s(a),d[a]))?d[a]:m},gc=function(a,b,c,d,f){var g=1,b=s(b);if(a[b]){a.F--;a=a[b];a.La?a.La++:a.La=1;try{for(var h=a.length,j=0;j<h;j++){var k=a[j];k&&!k.$&&(g&=fc(k,f)!==!1)}}finally{a.La--,dc(c,d,b,a)}}return Boolean(g)},fc=function(a,b){var c=a.handleEvent(b);a.Ab&&H(a.key);return c};
-Ub(function(a,b){if(!$b[a])return!0;var c=$b[a],d=c.type,f=E;if(!(d in f))return!0;var f=f[d],g,h;Db===i&&(Db=v&&!o.addEventListener);if(Db){var j;if(!(j=b))a:{j="window.event".split(".");for(var k=o;g=j.shift();)if(k[g]!=m)k=k[g];else{j=m;break a}j=k}g=j;j=!0 in f;k=!1 in f;if(j){if(g.keyCode<0||g.returnValue!=i)return!0;a:{var l=!1;if(g.keyCode==0)try{g.keyCode=-1;break a}catch(L){l=!0}if(l||g.returnValue==i)g.returnValue=!0}}l=Yb();l.ua(g,this);g=!0;try{if(j){for(var r=Rb(),I=l.currentTarget;I;I=
-I.parentNode)r.push(I);h=f[!0];h.F=h.I;for(var O=r.length-1;!l.ba&&O>=0&&h.F;O--)l.currentTarget=r[O],g&=gc(h,r[O],d,!0,l);if(k){h=f[!1];h.F=h.I;for(O=0;!l.ba&&O<r.length&&h.F;O++)l.currentTarget=r[O],g&=gc(h,r[O],d,!1,l)}}else g=fc(c,l)}finally{if(r)r.length=0,Sb(r);l.z();Zb(l)}return g}d=new C(b,this);try{g=fc(c,d)}finally{d.z()}return g});var hc=function(a){this.Bb=a;this.Ma=[]};t(hc,A);var ic=[],J=function(a,b,c,d){ea(c)||(ic[0]=c,c=ic);for(var f=0;f<c.length;f++)a.Ma.push(G(b,c[f],d||a,!1,a.Bb||a));return a},K=function(a,b,c,d,f,g){if(ea(c))for(var h=0;h<c.length;h++)K(a,b,c[h],d,f,g);else{a:{d=d||a;g=g||a.Bb||a;f=!!f;if(b=cc(b,c,f))for(c=0;c<b.length;c++)if(!b[c].$&&b[c].fa==d&&b[c].capture==f&&b[c].Fa==g){b=b[c];break a}b=m}if(b)b=b.key,H(b),Ba(a.Ma,b)}return a},jc=function(a){ya(a.Ma,H);a.Ma.length=0};
-hc.prototype.f=function(){hc.c.f.call(this);jc(this)};hc.prototype.handleEvent=function(){e(Error("EventHandler.handleEvent not implemented"))};var kc=function(){};t(kc,A);n=kc.prototype;n.qb=!0;n.Ea=m;n.bb=function(a){this.Ea=a};n.addEventListener=function(a,b,c,d){G(this,a,b,c,d)};n.removeEventListener=function(a,b,c,d){bc(this,a,b,c,d)};
-n.dispatchEvent=function(a){var b=a.type||a,c=E;if(b in c){if(p(a))a=new B(a,this);else if(a instanceof B)a.target=a.target||this;else{var d=a,a=new B(b,this);Ka(a,d)}var d=1,f,c=c[b],b=!0 in c,g;if(b){f=[];for(g=this;g;g=g.Ea)f.push(g);g=c[!0];g.F=g.I;for(var h=f.length-1;!a.ba&&h>=0&&g.F;h--)a.currentTarget=f[h],d&=gc(g,f[h],a.type,!0,a)&&a.va!=!1}if(!1 in c)if(g=c[!1],g.F=g.I,b)for(h=0;!a.ba&&h<f.length&&g.F;h++)a.currentTarget=f[h],d&=gc(g,f[h],a.type,!1,a)&&a.va!=!1;else for(f=this;!a.ba&&f&&
-g.F;f=f.Ea)a.currentTarget=f,d&=gc(g,f,a.type,!1,a)&&a.va!=!1;a=Boolean(d)}else a=!0;return a};n.f=function(){kc.c.f.call(this);ec(this);this.Ea=m};var M=function(a,b){a.style.display=b?"":"none"},lc=w?"MozUserSelect":x?"WebkitUserSelect":m,mc=function(a,b,c){c=!c?a.getElementsByTagName("*"):m;if(lc){if(b=b?"none":"",a.style[lc]=b,c)for(var a=0,d;d=c[a];a++)d.style[lc]=b}else if(v||Sa)if(b=b?"on":"",a.setAttribute("unselectable",b),c)for(a=0;d=c[a];a++)d.setAttribute("unselectable",b)};var nc=function(){};ca(nc);nc.prototype.Zb=0;nc.Q();var N=function(a){this.q=a||mb();this.sa=oc};t(N,kc);N.prototype.Yb=nc.Q();var oc=m,pc=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close"}e(Error("Invalid component state"))};n=N.prototype;n.ga=m;n.e=!1;n.d=m;n.sa=m;n.Lb=m;n.o=m;n.p=m;n.t=m;n.lb=!1;
-var qc=function(a){return a.ga||(a.ga=":"+(a.Yb.Zb++).toString(36))},rc=function(a,b){if(a.o&&a.o.t){var c=a.o.t,d=a.ga;d in c&&delete c[d];Ha(a.o.t,b,a)}a.ga=b};N.prototype.a=function(){return this.d};var sc=function(a){return a.da||(a.da=new hc(a))},uc=function(a,b){a==b&&e(Error("Unable to set parent component"));b&&a.o&&a.ga&&tc(a.o,a.ga)&&a.o!=b&&e(Error("Unable to set parent component"));a.o=b;N.c.bb.call(a,b)};n=N.prototype;n.getParent=function(){return this.o};
-n.bb=function(a){this.o&&this.o!=a&&e(Error("Method not supported"));N.c.bb.call(this,a)};n.Ka=function(){return this.q};n.l=function(){this.d=this.q.createElement("div")};n.J=function(a){if(this.e)e(Error("Component already rendered"));else if(a&&this.Z(a)){this.lb=!0;if(!this.q||this.q.H!=lb(a))this.q=mb(a);this.Ua(a);this.r()}else e(Error("Invalid element to decorate"))};n.Z=function(){return!0};n.Ua=function(a){this.d=a};n.r=function(){this.e=!0;vc(this,function(a){!a.e&&a.a()&&a.r()})};
-n.V=function(){vc(this,function(a){a.e&&a.V()});this.da&&jc(this.da);this.e=!1};n.f=function(){N.c.f.call(this);this.e&&this.V();this.da&&(this.da.z(),delete this.da);vc(this,function(a){a.z()});!this.lb&&this.d&&ub(this.d);this.o=this.Lb=this.d=this.t=this.p=m};n.Da=function(a,b){this.Ra(a,wc(this),b)};
-n.Ra=function(a,b,c){a.e&&(c||!this.e)&&e(Error("Component already rendered"));(b<0||b>wc(this))&&e(Error("Child component index out of bounds"));if(!this.t||!this.p)this.t={},this.p=[];a.getParent()==this?(this.t[qc(a)]=a,Ba(this.p,a)):Ha(this.t,qc(a),a);uc(a,this);Fa(this.p,b,0,a);a.e&&this.e&&a.getParent()==this?(c=this.B(),c.insertBefore(a.a(),c.childNodes[b]||m)):c?(this.d||this.l(),c=P(this,b+1),b=this.B(),c=c?c.d:m,a.e&&e(Error("Component already rendered")),a.d||a.l(),b?b.insertBefore(a.d,
-c||m):a.q.H.body.appendChild(a.d),(!a.o||a.o.e)&&a.r()):this.e&&!a.e&&a.d&&a.r()};n.B=function(){return this.d};var xc=function(a){if(a.sa==m){var b;a:{b=a.e?a.d:a.q.H.body;var c=lb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(b=c.defaultView.getComputedStyle(b,m))){b=b.direction||b.getPropertyValue("direction");break a}b=""}a.sa="rtl"==(b||((a.e?a.d:a.q.H.body).currentStyle?(a.e?a.d:a.q.H.body).currentStyle.direction:m)||(a.e?a.d:a.q.H.body).style.direction)}return a.sa};
-N.prototype.pa=function(a){this.e&&e(Error("Component already rendered"));this.sa=a};var wc=function(a){return a.p?a.p.length:0},tc=function(a,b){return a.t&&b?(b in a.t?a.t[b]:i)||m:m},P=function(a,b){return a.p?a.p[b]||m:m},vc=function(a,b,c){a.p&&ya(a.p,b,c)},yc=function(a,b){return a.p&&b?xa(a.p,b):-1};
-N.prototype.removeChild=function(a,b){if(a){var c=p(a)?a:qc(a),a=tc(this,c);if(c&&a){var d=this.t;c in d&&delete d[c];Ba(this.p,a);b&&(a.V(),a.d&&ub(a.d));uc(a,m)}}a||e(Error("Child is not in parent component"));return a};var zc=function(a,b){a.setAttribute("role",b);a.mc=b};var Bc=function(a,b,c,d,f){if(!v&&(!x||!y("525")))return!0;if(Ua&&f)return Ac(a);if(f&&!d)return!1;if(!c&&(b==17||b==18))return!1;if(v&&d&&b==a)return!1;switch(a){case 13:return!(v&&eb());case 27:return!x}return Ac(a)},Ac=function(a){if(a>=48&&a<=57)return!0;if(a>=96&&a<=106)return!0;if(a>=65&&a<=90)return!0;if(x&&a==0)return!0;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return!0;default:return!1}};var Q=function(a,b){a&&Cc(this,a,b)};t(Q,kc);n=Q.prototype;n.d=m;n.Ia=m;n.Wa=m;n.Ja=m;n.S=-1;n.R=-1;
-var Dc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},Ec={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},Fc={61:187,59:186},Gc=v||x&&y("525");
-Q.prototype.Qb=function(a){if(x&&(this.S==17&&!a.ctrlKey||this.S==18&&!a.altKey))this.R=this.S=-1;Gc&&!Bc(a.keyCode,this.S,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):this.R=w&&a.keyCode in Fc?Fc[a.keyCode]:a.keyCode};Q.prototype.Rb=function(){this.R=this.S=-1};
-Q.prototype.handleEvent=function(a){var b=a.L,c,d;v&&a.type=="keypress"?(c=this.R,d=c!=13&&c!=27?b.keyCode:0):x&&a.type=="keypress"?(c=this.R,d=b.charCode>=0&&b.charCode<63232&&Ac(c)?b.charCode:0):Sa?(c=this.R,d=Ac(c)?b.keyCode:0):(c=b.keyCode||this.R,d=b.charCode||0,Ua&&d==63&&!c&&(c=191));var f=c,g=b.keyIdentifier;c?c>=63232&&c in Dc?f=Dc[c]:c==25&&a.shiftKey&&(f=9):g&&g in Ec&&(f=Ec[g]);a=f==this.S;this.S=f;b=new Hc(f,d,a,b);try{this.dispatchEvent(b)}finally{b.z()}};Q.prototype.a=function(){return this.d};
-var Cc=function(a,b,c){a.Ja&&a.detach();a.d=b;a.Ia=G(a.d,"keypress",a,c);a.Wa=G(a.d,"keydown",a.Qb,c,a);a.Ja=G(a.d,"keyup",a.Rb,c,a)};Q.prototype.detach=function(){if(this.Ia)H(this.Ia),H(this.Wa),H(this.Ja),this.Ja=this.Wa=this.Ia=m;this.d=m;this.R=this.S=-1};Q.prototype.f=function(){Q.c.f.call(this);this.detach()};var Hc=function(a,b,c,d){d&&this.ua(d,i);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};t(Hc,C);var Jc=function(a,b){a||e(Error("Invalid class name "+a));q(b)||e(Error("Invalid decorator function "+b));Ic[a]=b},Kc={},Ic={};var R=function(){},Lc;ca(R);n=R.prototype;n.ea=function(){};n.l=function(a){return a.Ka().l("div",this.ta(a).join(" "),a.xa)};n.B=function(a){return a};n.ra=function(a,b,c){if(a=a.a?a.a():a)if(v&&!y("7")){var d=Mc(ib(a),b);d.push(b);ja(c?z:jb,a).apply(m,d)}else c?z(a,b):jb(a,b)};n.Z=function(){return!0};
-n.J=function(a,b){b.id&&rc(a,b.id);var c=this.B(b);a.xa=c&&c.firstChild?c.firstChild.nextSibling?Da(c.childNodes):c.firstChild:m;var d=0,f=this.m(),g=this.m(),h=!1,j=!1,c=!1,k=ib(b);ya(k,function(a){if(!h&&a==f)h=!0,g==f&&(j=!0);else if(!j&&a==g)j=!0;else{var b=d;if(!this.ob)this.Ha||Nc(this),this.ob=Ia(this.Ha);a=parseInt(this.ob[a],10);d=b|(isNaN(a)?0:a)}},this);a.h=d;h||(k.push(f),g==f&&(j=!0));j||k.push(g);var l=a.C;l&&k.push.apply(k,l);if(v&&!y("7")){var L=Mc(k);L.length>0&&(k.push.apply(k,L),
-c=!0)}if(!h||!j||l||c)b.className=k.join(" ");return b};n.Na=function(a){xc(a)&&this.pa(a.a(),!0);a.isEnabled()&&this.la(a,a.G())};n.za=function(a,b){mc(a,!b,!v&&!Sa)};n.pa=function(a,b){this.ra(a,this.m()+"-rtl",b)};n.U=function(a){var b;return a.s&32&&(b=a.k())?Ab(b):!1};n.la=function(a,b){var c;if(a.s&32&&(c=a.k())){if(!b&&a.h&32){try{c.blur()}catch(d){}a.h&32&&a.ma(m)}if(Ab(c)!=b)b?c.tabIndex=0:c.removeAttribute("tabIndex")}};n.ia=function(a,b){M(a,b)};
-n.v=function(a,b,c){var d=a.a();if(d){var f=Oc(this,b);f&&this.ra(a,f,c);Lc||(Lc={1:"disabled",4:"pressed",8:"selected",16:"checked",64:"expanded"});(a=Lc[b])&&d.setAttribute("aria-"+a,c)}};n.k=function(a){return a.a()};n.m=function(){return"goog-control"};n.ta=function(a){var b=this.m(),c=[b],d=this.m();d!=b&&c.push(d);b=a.h;for(d=[];b;){var f=b&-b;d.push(Oc(this,f));b&=~f}c.push.apply(c,d);(a=a.C)&&c.push.apply(c,a);v&&!y("7")&&c.push.apply(c,Mc(c));return c};
-var Mc=function(a,b){var c=[];b&&(a=a.concat([b]));ya([],function(d){za(d,ja(Aa,a))&&(!b||Aa(d,b))&&c.push(d.join("_"))});return c},Oc=function(a,b){a.Ha||Nc(a);return a.Ha[b]},Nc=function(a){var b=a.m();a.Ha={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+"-open"}};var S=function(a,b,c){N.call(this,c);if(!b){for(var b=this.constructor,d;b;){d=s(b);if(d=Kc[d])break;b=b.c?b.c.constructor:m}b=d?q(d.Q)?d.Q():new d:m}this.b=b;this.xa=a};t(S,N);n=S.prototype;n.xa=m;n.h=0;n.s=39;n.Xb=255;n.Ba=0;n.n=!0;n.C=m;n.ja=!0;n.ya=!1;n.k=function(){return this.b.k(this)};n.Aa=function(){return this.u||(this.u=new Q)};n.vb=function(){return this.b};
-n.ra=function(a,b){if(b){if(a)this.C?Aa(this.C,a)||this.C.push(a):this.C=[a],this.b.ra(this,a,!0)}else if(a&&this.C){Ba(this.C,a);if(this.C.length==0)this.C=m;this.b.ra(this,a,!1)}};n.l=function(){var a=this.b.l(this);this.d=a;var b=this.b.ea();b&&zc(a,b);this.ya||this.b.za(a,!1);this.G()||this.b.ia(a,!1)};n.B=function(){return this.b.B(this.a())};n.Z=function(a){return this.b.Z(a)};
-n.Ua=function(a){this.d=a=this.b.J(this,a);var b=this.b.ea();b&&zc(a,b);this.ya||this.b.za(a,!1);this.n=a.style.display!="none"};n.r=function(){S.c.r.call(this);this.b.Na(this);if(this.s&-2&&(this.ja&&Pc(this,!0),this.s&32)){var a=this.k();if(a){var b=this.Aa();Cc(b,a);J(J(J(sc(this),b,"key",this.O),a,"focus",this.oa),a,"blur",this.ma)}}};
-var Pc=function(a,b){var c=sc(a),d=a.a();b?(J(J(J(J(c,d,"mouseover",a.Za),d,"mousedown",a.ka),d,"mouseup",a.$a),d,"mouseout",a.Ya),v&&J(c,d,"dblclick",a.sb)):(K(K(K(K(c,d,"mouseover",a.Za),d,"mousedown",a.ka),d,"mouseup",a.$a),d,"mouseout",a.Ya),v&&K(c,d,"dblclick",a.sb))};n=S.prototype;n.V=function(){S.c.V.call(this);this.u&&this.u.detach();this.G()&&this.isEnabled()&&this.b.la(this,!1)};n.f=function(){S.c.f.call(this);this.u&&(this.u.z(),delete this.u);delete this.b;this.C=this.xa=m};
-n.pa=function(a){S.c.pa.call(this,a);var b=this.a();b&&this.b.pa(b,a)};n.za=function(a){this.ya=a;var b=this.a();b&&this.b.za(b,a)};n.G=function(){return this.n};n.ia=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){var c=this.a();c&&this.b.ia(c,a);this.isEnabled()&&this.b.la(this,a);this.n=a;return!0}return!1};n.isEnabled=function(){return!(this.h&1)};
-n.qa=function(a){var b=this.getParent();if((!b||typeof b.isEnabled!="function"||b.isEnabled())&&T(this,1,!a))a||(this.setActive(!1),this.A(!1)),this.G()&&this.b.la(this,a),this.v(1,!a)};n.A=function(a){T(this,2,a)&&this.v(2,a)};n.setActive=function(a){T(this,4,a)&&this.v(4,a)};var Qc=function(a,b){T(a,8,b)&&a.v(8,b)},Rc=function(a,b){T(a,64,b)&&a.v(64,b)};S.prototype.v=function(a,b){if(this.s&a&&b!=!!(this.h&a))this.b.v(this,a,b),this.h=b?this.h|a:this.h&~a};
-var Sc=function(a,b,c){a.e&&a.h&b&&!c&&e(Error("Component already rendered"));!c&&a.h&b&&a.v(b,!1);a.s=c?a.s|b:a.s&~b},U=function(a,b){return!!(a.Xb&b)&&!!(a.s&b)},T=function(a,b,c){return!!(a.s&b)&&!!(a.h&b)!=c&&(!(a.Ba&b)||a.dispatchEvent(pc(b,c)))&&!a.ab};n=S.prototype;n.Za=function(a){(!a.relatedTarget||!wb(this.a(),a.relatedTarget))&&this.dispatchEvent("enter")&&this.isEnabled()&&U(this,2)&&this.A(!0)};
-n.Ya=function(a){if((!a.relatedTarget||!wb(this.a(),a.relatedTarget))&&this.dispatchEvent("leave"))U(this,4)&&this.setActive(!1),U(this,2)&&this.A(!1)};n.ka=function(a){if(this.isEnabled()&&(U(this,2)&&this.A(!0),Ib(a)&&(!x||!Ua||!a.ctrlKey)))U(this,4)&&this.setActive(!0),this.b.U(this)&&this.k().focus();!this.ya&&Ib(a)&&(!x||!Ua||!a.ctrlKey)&&a.preventDefault()};n.$a=function(a){this.isEnabled()&&(U(this,2)&&this.A(!0),this.h&4&&Tc(this,a)&&U(this,4)&&this.setActive(!1))};
-n.sb=function(a){this.isEnabled()&&Tc(this,a)};var Tc=function(a,b){if(U(a,16)){var c=!(a.h&16);T(a,16,c)&&a.v(16,c)}U(a,8)&&Qc(a,!0);U(a,64)&&Rc(a,!(a.h&64));c=new B("action",a);if(b)for(var d=["altKey","ctrlKey","metaKey","shiftKey","platformModifierKey"],f,g=0;f=d[g];g++)c[f]=b[f];return a.dispatchEvent(c)};S.prototype.oa=function(){U(this,32)&&T(this,32,!0)&&this.v(32,!0)};S.prototype.ma=function(){U(this,4)&&this.setActive(!1);U(this,32)&&T(this,32,!1)&&this.v(32,!1)};
-S.prototype.O=function(a){return this.G()&&this.isEnabled()&&this.jb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};S.prototype.jb=function(a){return a.keyCode==13&&Tc(this,a)};q(S)||e(Error("Invalid component class "+S));q(R)||e(Error("Invalid renderer class "+R));var Uc=s(S);Kc[Uc]=R;Jc("goog-control",function(){return new S(m)});var Vc=function(){};t(Vc,R);ca(Vc);Vc.prototype.l=function(a){return a.Ka().l("div",this.m())};Vc.prototype.J=function(a,b){if(b.tagName=="HR"){var c=b,b=this.l(a);c.parentNode&&c.parentNode.insertBefore(b,c);ub(c)}else z(b,this.m());return b};Vc.prototype.m=function(){return"goog-menuseparator"};var Wc=function(a,b){S.call(this,m,a||Vc.Q(),b);Sc(this,1,!1);Sc(this,2,!1);Sc(this,4,!1);Sc(this,32,!1);this.h=1};t(Wc,S);Wc.prototype.r=function(){Wc.c.r.call(this);zc(this.a(),"separator")};Jc("goog-menuseparator",function(){return new Wc});var V=function(){};ca(V);V.prototype.ea=function(){};var Xc=function(a,b){if(a)a.tabIndex=b?0:-1};n=V.prototype;n.l=function(a){return a.Ka().l("div",this.ta(a).join(" "))};n.B=function(a){return a};n.Z=function(a){return a.tagName=="DIV"};n.J=function(a,b){b.id&&rc(a,b.id);var c=this.m(),d=!1,f=ib(b);f&&ya(f,function(b){b==c?d=!0:b&&this.Va(a,b,c)},this);d||z(b,c);Yc(a,this.B(b));return b};
-n.Va=function(a,b,c){b==c+"-disabled"?a.qa(!1):b==c+"-horizontal"?Zc(a,"horizontal"):b==c+"-vertical"&&Zc(a,"vertical")};var Yc=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(c.nodeType==1){var f;a:{f=i;for(var g=ib(c),h=0,j=g.length;h<j;h++)if(f=g[h]in Ic?Ic[g[h]]():m)break a;f=m}if(f)f.d=c,a.isEnabled()||f.qa(!1),a.Da(f),f.J(c)}else(!c.nodeValue||ma(c.nodeValue)=="")&&b.removeChild(c);c=d}};
-V.prototype.Na=function(a){a=a.a();mc(a,!0,w);if(v)a.hideFocus=!0;var b=this.ea();b&&zc(a,b)};V.prototype.k=function(a){return a.a()};V.prototype.m=function(){return"goog-container"};V.prototype.ta=function(a){var b=this.m(),c=[b,a.P=="horizontal"?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var W=function(a,b,c){N.call(this,c);this.b=b||V.Q();this.P=a||"vertical"};t(W,N);n=W.prototype;n.Oa=m;n.u=m;n.b=m;n.P=m;n.n=!0;n.X=!0;n.Xa=!0;n.i=-1;n.g=m;n.ca=!1;n.Pb=!1;n.Ob=!0;n.N=m;n.k=function(){return this.Oa||this.b.k(this)};n.Aa=function(){return this.u||(this.u=new Q(this.k()))};n.vb=function(){return this.b};n.l=function(){this.d=this.b.l(this)};n.B=function(){return this.b.B(this.a())};n.Z=function(a){return this.b.Z(a)};
-n.Ua=function(a){this.d=this.b.J(this,a);if(a.style.display=="none")this.n=!1};n.r=function(){W.c.r.call(this);vc(this,function(a){a.e&&$c(this,a)},this);var a=this.a();this.b.Na(this);this.ia(this.n,!0);J(J(J(J(J(J(J(J(sc(this),this,"enter",this.Hb),this,"highlight",this.Ib),this,"unhighlight",this.Kb),this,"open",this.Jb),this,"close",this.Fb),a,"mousedown",this.ka),lb(a),"mouseup",this.Gb),a,["mousedown","mouseup","mouseover","mouseout"],this.Eb);this.U()&&ad(this,!0)};
-var ad=function(a,b){var c=sc(a),d=a.k();b?J(J(J(c,d,"focus",a.oa),d,"blur",a.ma),a.Aa(),"key",a.O):K(K(K(c,d,"focus",a.oa),d,"blur",a.ma),a.Aa(),"key",a.O)};n=W.prototype;n.V=function(){bd(this,-1);this.g&&Rc(this.g,!1);this.ca=!1;W.c.V.call(this)};n.f=function(){W.c.f.call(this);if(this.u)this.u.z(),this.u=m;this.b=this.g=this.N=this.Oa=m};n.Hb=function(){return!0};
-n.Ib=function(a){var b=yc(this,a.target);if(b>-1&&b!=this.i){var c=P(this,this.i);c&&c.A(!1);this.i=b;c=P(this,this.i);this.ca&&c.setActive(!0);this.Ob&&this.g&&c!=this.g&&(c.s&64?Rc(c,!0):Rc(this.g,!1))}this.a().setAttribute("aria-activedescendant",a.target.a().id)};n.Kb=function(a){if(a.target==P(this,this.i))this.i=-1;this.a().setAttribute("aria-activedescendant","")};n.Jb=function(a){if((a=a.target)&&a!=this.g&&a.getParent()==this)this.g&&Rc(this.g,!1),this.g=a};
-n.Fb=function(a){if(a.target==this.g)this.g=m};n.ka=function(a){if(this.X)this.ca=!0;var b=this.k();b&&Ab(b)?b.focus():a.preventDefault()};n.Gb=function(){this.ca=!1};n.Eb=function(a){var b;a:{b=a.target;if(this.N)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.N){b=this.N[d];break a}b=b.parentNode}b=m}if(b)switch(a.type){case "mousedown":b.ka(a);break;case "mouseup":b.$a(a);break;case "mouseover":b.Za(a);break;case "mouseout":b.Ya(a)}};n.oa=function(){};
-n.ma=function(){bd(this,-1);this.ca=!1;this.g&&Rc(this.g,!1)};n.O=function(a){return this.isEnabled()&&this.G()&&(wc(this)!=0||this.Oa)&&this.jb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};
-n.jb=function(a){var b=P(this,this.i);if(b&&typeof b.O=="function"&&b.O(a))return!0;if(this.g&&this.g!=b&&typeof this.g.O=="function"&&this.g.O(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.U())this.k().blur();else return!1;break;case 36:cd(this);break;case 35:dd(this);break;case 38:if(this.P=="vertical")ed(this);else return!1;break;case 37:if(this.P=="horizontal")xc(this)?fd(this):ed(this);else return!1;break;case 40:if(this.P=="vertical")fd(this);
-else return!1;break;case 39:if(this.P=="horizontal")xc(this)?ed(this):fd(this);else return!1;break;default:return!1}return!0};var $c=function(a,b){var c=b.a(),c=c.id||(c.id=qc(b));if(!a.N)a.N={};a.N[c]=b};W.prototype.Da=function(a,b){W.c.Da.call(this,a,b)};W.prototype.Ra=function(a,b,c){a.Ba|=2;a.Ba|=64;(this.U()||!this.Pb)&&Sc(a,32,!1);a.e&&!1!=a.ja&&Pc(a,!1);a.ja=!1;W.c.Ra.call(this,a,b,c);c&&this.e&&$c(this,a);b<=this.i&&this.i++};
-W.prototype.removeChild=function(a,b){if(a=p(a)?tc(this,a):a){var c=yc(this,a);c!=-1&&(c==this.i?a.A(!1):c<this.i&&this.i--);var d=a.a();if(d&&d.id)c=this.N,d=d.id,d in c&&delete c[d]}c=a=W.c.removeChild.call(this,a,b);c.e&&!0!=c.ja&&Pc(c,!0);c.ja=!0;return a};var Zc=function(a,b){a.a()&&e(Error("Component already rendered"));a.P=b};n=W.prototype;n.G=function(){return this.n};
-n.ia=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){this.n=a;var c=this.a();c&&(M(c,a),this.U()&&Xc(this.k(),this.X&&this.n),b||this.dispatchEvent(this.n?"aftershow":"afterhide"));return!0}return!1};n.isEnabled=function(){return this.X};n.qa=function(a){if(this.X!=a&&this.dispatchEvent(a?"enable":"disable"))a?(this.X=!0,vc(this,function(a){a.pb?delete a.pb:a.qa(!0)})):(vc(this,function(a){a.isEnabled()?a.qa(!1):a.pb=!0}),this.ca=this.X=!1),this.U()&&Xc(this.k(),a&&this.n)};
-n.U=function(){return this.Xa};n.la=function(a){a!=this.Xa&&this.e&&ad(this,a);this.Xa=a;this.X&&this.n&&Xc(this.k(),a)};var bd=function(a,b){var c=P(a,b);c?c.A(!0):a.i>-1&&P(a,a.i).A(!1)};W.prototype.A=function(a){bd(this,yc(this,a))};
-var cd=function(a){gd(a,function(a,c){return(a+1)%c},wc(a)-1)},dd=function(a){gd(a,function(a,c){a--;return a<0?c-1:a},0)},fd=function(a){gd(a,function(a,c){return(a+1)%c},a.i)},ed=function(a){gd(a,function(a,c){a--;return a<0?c-1:a},a.i)},gd=function(a,b,c){for(var c=c<0?yc(a,a.g):c,d=wc(a),c=b.call(a,c,d),f=0;f<=d;){var g=P(a,c);if(g&&g.G()&&g.isEnabled()&&g.s&2){a.Ta(c);break}f++;c=b.call(a,c,d)}};W.prototype.Ta=function(a){bd(this,a)};var hd=function(){};t(hd,R);ca(hd);n=hd.prototype;n.m=function(){return"goog-tab"};n.ea=function(){return"tab"};n.l=function(a){var b=hd.c.l.call(this,a);(a=a.Pa())&&this.Sa(b,a);return b};n.J=function(a,b){var b=hd.c.J.call(this,a,b),c=this.Pa(b);if(c)a.mb=c;if(a.h&8&&(c=a.getParent())&&q(c.W))a.v(8,!1),c.W(a);return b};n.Pa=function(a){return a.title||""};n.Sa=function(a,b){if(a)a.title=b||""};var id=function(a,b,c){S.call(this,a,b||hd.Q(),c);Sc(this,8,!0);this.Ba|=9};t(id,S);id.prototype.Pa=function(){return this.mb};id.prototype.Sa=function(a){this.vb().Sa(this.a(),a);this.mb=a};Jc("goog-tab",function(){return new id(m)});var X=function(){};t(X,V);ca(X);X.prototype.m=function(){return"goog-tab-bar"};X.prototype.ea=function(){return"tablist"};X.prototype.Va=function(a,b,c){if(!this.ub)this.Ga||jd(this),this.ub=Ia(this.Ga);var d=this.ub[b];d?(Zc(a,kd(d)),a.nb=d):X.c.Va.call(this,a,b,c)};X.prototype.ta=function(a){var b=X.c.ta.call(this,a);this.Ga||jd(this);b.push(this.Ga[a.nb]);return b};var jd=function(a){var b=a.m();a.Ga={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var Y=function(a,b,c){a=a||"top";Zc(this,kd(a));this.nb=a;W.call(this,this.P,b||X.Q(),c);ld(this)};t(Y,W);n=Y.prototype;n.Sb=!0;n.D=m;n.r=function(){Y.c.r.call(this);ld(this)};n.f=function(){Y.c.f.call(this);this.D=m};n.removeChild=function(a,b){md(this,a);return Y.c.removeChild.call(this,a,b)};n.Ta=function(a){Y.c.Ta.call(this,a);this.Sb&&this.W(P(this,a))};n.W=function(a){a?Qc(a,!0):this.D&&Qc(this.D,!1)};
-var md=function(a,b){if(b&&b==a.D){for(var c=yc(a,b),d=c-1;b=P(a,d);d--)if(b.G()&&b.isEnabled()){a.W(b);return}for(c+=1;b=P(a,c);c++)if(b.G()&&b.isEnabled()){a.W(b);return}a.W(m)}};n=Y.prototype;n.cc=function(a){this.D&&this.D!=a.target&&Qc(this.D,!1);this.D=a.target};n.dc=function(a){if(a.target==this.D)this.D=m};n.ac=function(a){md(this,a.target)};n.bc=function(a){md(this,a.target)};n.oa=function(){P(this,this.i)||this.A(this.D||P(this,0))};
-var ld=function(a){J(J(J(J(sc(a),a,"select",a.cc),a,"unselect",a.dc),a,"disable",a.ac),a,"hide",a.bc)},kd=function(a){return a=="start"||a=="end"?"vertical":"horizontal"};Jc("goog-tab-bar",function(){return new Y});var Z=function(a,b,c,d,f){function g(a){if(a)a.tabIndex=0,G(a,"click",h.Mb,!1,h),G(a,"keydown",h.Nb,!1,h)}this.q=f||mb();this.K=this.q.a(a)||m;this.na=this.q.a(d||m);this.Ca=(this.Qa=q(b)?b:m)||!b?m:this.q.a(b);this.j=c==!0;var h=this;g(this.K);g(this.na);this.Y(this.j)};t(Z,kc);n=Z.prototype;n.f=function(){this.K&&ec(this.K);this.na&&ec(this.na);Z.c.f.call(this)};n.B=function(){return this.Ca};n.toggle=function(){this.Y(!this.j)};
-n.Y=function(a){if(this.Ca)M(this.Ca,a);else if(a&&this.Qa)this.Ca=this.Qa();if(this.na)M(this.K,!a),M(this.na,a);else{if(this.K){var b=this.K;a?z(b,"goog-zippy-expanded"):jb(b,"goog-zippy-expanded");b=this.K;!a?z(b,"goog-zippy-collapsed"):jb(b,"goog-zippy-collapsed")}this.K&&this.K.setAttribute("aria-expanded",a)}this.j=a;this.dispatchEvent(new nd("toggle",this,this.j))};n.Nb=function(a){if(a.keyCode==13||a.keyCode==32)this.toggle(),a.preventDefault(),a.stopPropagation()};n.Mb=function(){this.toggle()};
-var nd=function(a,b,c){B.call(this,a,b);this.kc=c};t(nd,B);var pd=function(a,b){this.kb=[];for(var c=nb(a),c=ob("span","ae-zippy",c),d=0,f;f=c[d];d++)this.kb.push(new Z(f,f.parentNode.parentNode.parentNode.nextElementSibling!=i?f.parentNode.parentNode.parentNode.nextElementSibling:vb(f.parentNode.parentNode.parentNode.nextSibling),!1));this.ec=new od(this.kb,nb(b))};pd.prototype.ic=function(){return this.ec};pd.prototype.jc=function(){return this.kb};
-var od=function(a,b){this.wa=a;if(this.wa.length)for(var c=0,d;d=this.wa[c];c++)G(d,"toggle",this.Ub,!1,this);this.ib=0;this.j=!1;c="ae-toggle ae-plus ae-action";this.wa.length||(c+=" ae-disabled");this.T=sb("span",{className:c},"Expand All");G(this.T,"click",this.Tb,!1,this);b&&b.appendChild(this.T)};od.prototype.Tb=function(){this.wa.length&&this.Y(!this.j)};
-od.prototype.Ub=function(a){a=a.currentTarget;a.j?this.ib+=1:this.ib-=1;if(a.j!=this.j)if(a.j)this.j=!0,qd(this,!0);else if(this.ib==0)this.j=!1,qd(this,!1)};od.prototype.Y=function(a){this.j=a;for(var a=0,b;b=this.wa[a];a++)b.j!=this.j&&b.Y(this.j);qd(this)};
-var qd=function(a,b){(b!==i?b:a.j)?(jb(a.T,"ae-plus"),z(a.T,"ae-minus"),xb(a.T,"Collapse All")):(jb(a.T,"ae-minus"),z(a.T,"ae-plus"),xb(a.T,"Expand All"))},rd=function(a){this.Vb=a;this.zb={};var b,c=sb("div",{},b=sb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),sb("div",{className:"goog-tab-bar-clear"}),a=sb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new Y;d.J(b);G(d,"select",this.xb,!1,this);G(d,"unselect",this.xb,!1,this);b=0;for(var f;f=
-this.Vb[b];b++)if(f=nb("ae-stats-details-"+f)){var g=ob("h2",m,f)[0],h;h=g;var j=i;hb&&"innerText"in h?j=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(j=[],Bb(h,j,!0),j=j.join(""));j=j.replace(/ \xAD /g," ").replace(/\xAD/g,"");j=j.replace(/\u200B/g,"");hb||(j=j.replace(/ +/g," "));j!=" "&&(j=j.replace(/^\s*/,""));h=j;ub(g);g=new id(h);this.zb[s(g)]=f;d.Da(g,!0);a.appendChild(f);b==0?d.W(g):M(f,!1)}nb("bd").appendChild(c)};rd.prototype.xb=function(a){var b=this.zb[s(a.target)];M(b,a.type=="select")};
-aa("ae.Stats.Details.Tabs",rd);aa("goog.ui.Zippy",Z);Z.prototype.setExpanded=Z.prototype.Y;aa("ae.Stats.MakeZippys",pd);pd.prototype.getExpandCollapse=pd.prototype.ic;pd.prototype.getZippys=pd.prototype.jc;od.prototype.setExpanded=od.prototype.Y;var $=function(){this.cb=[];this.hb=[]},sd=[[5,0.2,1],[6,0.2,1.2],[5,0.25,1.25],[6,0.25,1.5],[4,0.5,2],[5,0.5,2.5],[6,0.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],td=function(a){if(a<=0)return[2,0.5,1];for(var b=1;a<1;)a*=10,b/=10;for(;a>=10;)a/=10,b*=10;for(var c=0;c<sd.length;c++)if(a<=sd[c][2])return[sd[c][0],sd[c][1]*b,sd[c][2]*b];return[5,2*b,10*b]};$.prototype.gb="stats/static/pix.gif";$.prototype.w="ae-stats-gantt-";$.prototype.fb=0;$.prototype.write=function(a){this.hb.push(a)};
-var ud=function(a,b,c,d){a.write('<tr class="'+a.w+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.w+'axis">');for(var f=0;f<=b;f++)a.write('<img class="'+a.w+'tick" src="'+a.gb+'" alt="" '),a.write('style="left:'+f*c*d+'%"\n>'),a.write('<span class="'+a.w+'scale" style="left:'+f*c*d+'%">'),a.write(" "+f*c+"</span>");a.write("</div></td></tr>\n")};
-$.prototype.hc=function(){this.hb=[];var a=td(this.fb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.w+'table">\n');ud(this,b,c,a);for(var d=0;d<this.cb.length;d++){var f=this.cb[d];this.write('<tr class="'+this.w+'datarow"><td width="20%">');f.label.length>0&&(f.ha.length>0&&this.write('<a class="'+this.w+'link" href="'+f.ha+'">'),this.write(f.label),f.ha.length>0&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.w+'container">');f.ha.length>0&&this.write('<a class="'+
+0,d=ma(String(a)).split("."),f=ma(String(b)).split("."),g=Math.max(d.length,f.length),h=0;c==0&&h<g;h++){var j=d[h]||"",k=f[h]||"",l=RegExp("(\\d*)(\\D*)","g"),M=RegExp("(\\d*)(\\D*)","g");do{var r=l.exec(j)||["","",""],J=M.exec(k)||["","",""];if(r[0].length==0&&J[0].length==0)break;c=ta(r[1].length==0?0:parseInt(r[1],10),J[1].length==0?0:parseInt(J[1],10))||ta(r[2].length==0,J[2].length==0)||ta(r[2],J[2])}while(c==0)}return c},ta=function(a,b){if(a<b)return-1;else if(a>b)return 1;return 0};var va=function(a,b){b.unshift(a);ka.call(this,la.apply(m,b));b.shift();this.lc=a};t(va,ka);va.prototype.name="AssertionError";var u=function(a,b,c){if(!a){var d=Array.prototype.slice.call(arguments,2),f="Assertion failed";if(b){f+=": "+b;var g=d}e(new va(""+f,g||[]))}};var v=Array.prototype,wa=v.indexOf?function(a,b,c){u(a.length!=m);return v.indexOf.call(a,b,c)}:function(a,b,c){c=c==m?0:c<0?Math.max(0,a.length+c):c;if(p(a))return!p(b)||b.length!=1?-1:a.indexOf(b,c);for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},xa=v.forEach?function(a,b,c){u(a.length!=m);v.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,f=p(a)?a.split(""):a,g=0;g<d;g++)g in f&&b.call(c,f[g],g,a)},ya=v.every?function(a,b,c){u(a.length!=m);return v.every.call(a,b,c)}:function(a,
+b,c){for(var d=a.length,f=p(a)?a.split(""):a,g=0;g<d;g++)if(g in f&&!b.call(c,f[g],g,a))return!1;return!0},za=function(a,b){return wa(a,b)>=0},Aa=function(a,b){var c=wa(a,b);c>=0&&(u(a.length!=m),v.splice.call(a,c,1))},Ba=function(a){return v.concat.apply(v,arguments)},Ca=function(a){if(ea(a))return Ba(a);else{for(var b=[],c=0,d=a.length;c<d;c++)b[c]=a[c];return b}},Ea=function(a,b,c,d){u(a.length!=m);v.splice.apply(a,Da(arguments,1))},Da=function(a,b,c){u(a.length!=m);return arguments.length<=2?
+v.slice.call(a,b):v.slice.call(a,b,c)};var Fa=function(a,b){for(var c in a)b.call(i,a[c],c,a)},Ga=function(a,b,c){b in a&&e(Error('The object already contains the key "'+b+'"'));a[b]=c},Ha=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Ia="constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(","),Ja=function(a,b){for(var c,d,f=1;f<arguments.length;f++){d=arguments[f];for(c in d)a[c]=d[c];for(var g=0;g<Ia.length;g++)c=Ia[g],Object.prototype.hasOwnProperty.call(d,c)&&(a[c]=d[c])}};var Ka,La,Ma,Na,Oa=function(){return o.navigator?o.navigator.userAgent:m};Na=Ma=La=Ka=!1;var Pa;if(Pa=Oa()){var Qa=o.navigator;Ka=Pa.indexOf("Opera")==0;La=!Ka&&Pa.indexOf("MSIE")!=-1;Ma=!Ka&&Pa.indexOf("WebKit")!=-1;Na=!Ka&&!Ma&&Qa.product=="Gecko"}var Ra=Ka,w=La,x=Na,y=Ma,Sa=o.navigator,Ta=(Sa&&Sa.platform||"").indexOf("Mac")!=-1,Ua;
+a:{var Va="",Wa;if(Ra&&o.opera)var Xa=o.opera.version,Va=typeof Xa=="function"?Xa():Xa;else if(x?Wa=/rv\:([^\);]+)(\)|;)/:w?Wa=/MSIE\s+([^\);]+)(\)|;)/:y&&(Wa=/WebKit\/(\S+)/),Wa)var Ya=Wa.exec(Oa()),Va=Ya?Ya[1]:"";if(w){var Za,$a=o.document;Za=$a?$a.documentMode:i;if(Za>parseFloat(Va)){Ua=String(Za);break a}}Ua=Va}var ab=Ua,bb={},z=function(a){return bb[a]||(bb[a]=ua(ab,a)>=0)},cb={},db=function(){return cb[9]||(cb[9]=w&&document.documentMode&&document.documentMode>=9)};var eb,fb=!w||db();!x&&!w||w&&db()||x&&z("1.9.1");var gb=w&&!z("9");var hb=function(a){return(a=a.className)&&typeof a.split=="function"?a.split(/\s+/):[]},A=function(a,b){var c=hb(a),d=Da(arguments,1),f;f=c;for(var g=0,h=0;h<d.length;h++)za(f,d[h])||(f.push(d[h]),g++);f=g==d.length;a.className=c.join(" ");return f},ib=function(a,b){var c=hb(a),d=Da(arguments,1),f;f=c;for(var g=0,h=0;h<f.length;h++)za(d,f[h])&&(Ea(f,h--,1),g++);f=g==d.length;a.className=c.join(" ");return f};var lb=function(a){return a?new jb(kb(a)):eb||(eb=new jb)},mb=function(a){return p(a)?document.getElementById(a):a},nb=function(a,b,c){c=c||document;a=a&&a!="*"?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(!y||document.compatMode=="CSS1Compat"||z("528"))&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName)if(c=c.getElementsByClassName(b),a){for(var d={},f=0,g=0,h;h=c[g];g++)a==h.nodeName&&(d[f++]=h);d.length=f;return d}else return c;c=c.getElementsByTagName(a||
+"*");if(b){d={};for(g=f=0;h=c[g];g++)a=h.className,typeof a.split=="function"&&za(a.split(/\s+/),b)&&(d[f++]=h);d.length=f;return d}else return c},pb=function(a,b){Fa(b,function(b,d){d=="style"?a.style.cssText=b:d=="class"?a.className=b:d=="for"?a.htmlFor=b:d in ob?a.setAttribute(ob[d],b):d.lastIndexOf("aria-",0)==0?a.setAttribute(d,b):a[d]=b})},ob={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",rowspan:"rowSpan",valign:"vAlign",height:"height",width:"width",usemap:"useMap",
+frameborder:"frameBorder",maxlength:"maxLength",type:"type"},rb=function(a,b,c){return qb(document,arguments)},qb=function(a,b){var c=b[0],d=b[1];if(!fb&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',sa(d.name),'"');if(d.type){c.push(' type="',sa(d.type),'"');var f={};Ja(f,d);d=f;delete d.type}c.push(">");c=c.join("")}c=a.createElement(c);if(d)p(d)?c.className=d:ea(d)?A.apply(m,[c].concat(d)):pb(c,d);b.length>2&&sb(a,c,b);return c},sb=function(a,b,c){function d(c){c&&b.appendChild(p(c)?
+a.createTextNode(c):c)}for(var f=2;f<c.length;f++){var g=c[f];if(fa(g)&&!(ga(g)&&g.nodeType>0)){var h;a:{if(g&&typeof g.length=="number")if(ga(g)){h=typeof g.item=="function"||typeof g.item=="string";break a}else if(q(g)){h=typeof g.item=="function";break a}h=!1}xa(h?Ca(g):g,d)}else d(g)}},tb=function(a){a&&a.parentNode&&a.parentNode.removeChild(a)},ub=function(a){for(;a&&a.nodeType!=1;)a=a.nextSibling;return a},vb=function(a,b){if(a.contains&&b.nodeType==1)return a==b||a.contains(b);if(typeof a.compareDocumentPosition!=
+"undefined")return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b.parentNode;return b==a},kb=function(a){return a.nodeType==9?a:a.ownerDocument||a.document},wb=function(a,b){if("textContent"in a)a.textContent=b;else if(a.firstChild&&a.firstChild.nodeType==3){for(;a.lastChild!=a.firstChild;)a.removeChild(a.lastChild);a.firstChild.data=b}else{for(var c;c=a.firstChild;)a.removeChild(c);a.appendChild(kb(a).createTextNode(b))}},xb={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},yb={IMG:" ",
+BR:"\n"},zb=function(a){var b=a.getAttributeNode("tabindex");return b&&b.specified?(a=a.tabIndex,typeof a=="number"&&a>=0&&a<32768):!1},Ab=function(a,b,c){if(!(a.nodeName in xb))if(a.nodeType==3)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in yb)b.push(yb[a.nodeName]);else for(a=a.firstChild;a;)Ab(a,b,c),a=a.nextSibling},jb=function(a){this.H=a||o.document||document};n=jb.prototype;n.Ka=lb;n.a=function(a){return p(a)?this.H.getElementById(a):a};
+n.l=function(a,b,c){return qb(this.H,arguments)};n.createElement=function(a){return this.H.createElement(a)};n.createTextNode=function(a){return this.H.createTextNode(a)};n.appendChild=function(a,b){a.appendChild(b)};n.contains=vb;var Bb=function(a){Bb[" "](a);return a};Bb[" "]=ba;var Cb,Db=!w||db(),Eb=w&&!z("8");var B=function(){};B.prototype.ab=!1;B.prototype.z=function(){if(!this.ab)this.ab=!0,this.f()};B.prototype.f=function(){this.gc&&Fb.apply(m,this.gc)};var Fb=function(a){for(var b=0,c=arguments.length;b<c;++b){var d=arguments[b];fa(d)?Fb.apply(m,d):d&&typeof d.z=="function"&&d.z()}};var C=function(a,b){this.type=a;this.currentTarget=this.target=b};t(C,B);n=C.prototype;n.f=function(){delete this.type;delete this.target;delete this.currentTarget};n.ba=!1;n.va=!0;n.stopPropagation=function(){this.ba=!0};n.preventDefault=function(){this.va=!1};var D=function(a,b){a&&this.ua(a,b)};t(D,C);var Gb=[1,4,2];n=D.prototype;n.target=m;n.relatedTarget=m;n.offsetX=0;n.offsetY=0;n.clientX=0;n.clientY=0;n.screenX=0;n.screenY=0;n.button=0;n.keyCode=0;n.charCode=0;n.ctrlKey=!1;n.altKey=!1;n.shiftKey=!1;n.metaKey=!1;n.Wb=!1;n.M=m;
+n.ua=function(a,b){var c=this.type=a.type;C.call(this,c);this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(x){var f;a:{try{Bb(d.nodeName);f=!0;break a}catch(g){}f=!1}f||(d=m)}}else if(c=="mouseover")d=a.fromElement;else if(c=="mouseout")d=a.toElement;this.relatedTarget=d;this.offsetX=a.offsetX!==i?a.offsetX:a.layerX;this.offsetY=a.offsetY!==i?a.offsetY:a.layerY;this.clientX=a.clientX!==i?a.clientX:a.pageX;this.clientY=a.clientY!==i?a.clientY:a.pageY;this.screenX=
+a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||(c=="keypress"?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.Wb=Ta?a.metaKey:a.ctrlKey;this.state=a.state;this.M=a;delete this.va;delete this.ba};var Hb=function(a){return Db?a.M.button==0:a.type=="click"?!0:!!(a.M.button&Gb[0])};
+D.prototype.stopPropagation=function(){D.c.stopPropagation.call(this);this.M.stopPropagation?this.M.stopPropagation():this.M.cancelBubble=!0};D.prototype.preventDefault=function(){D.c.preventDefault.call(this);var a=this.M;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,Eb)try{if(a.ctrlKey||a.keyCode>=112&&a.keyCode<=123)a.keyCode=-1}catch(b){}};D.prototype.f=function(){D.c.f.call(this);this.relatedTarget=this.currentTarget=this.target=this.M=m};var E=function(a,b){this.Cb=b;this.aa=[];a>this.Cb&&e(Error("[goog.structs.SimplePool] Initial cannot be greater than max"));for(var c=0;c<a;c++)this.aa.push(this.N?this.N():{})};t(E,B);E.prototype.N=m;E.prototype.Db=m;E.prototype.getObject=function(){return this.aa.length?this.aa.pop():this.N?this.N():{}};var Jb=function(a,b){a.aa.length<a.Cb?a.aa.push(b):Ib(a,b)},Ib=function(a,b){if(a.Db)a.Db(b);else if(ga(b))if(q(b.z))b.z();else for(var c in b)delete b[c]};
+E.prototype.f=function(){E.c.f.call(this);for(var a=this.aa;a.length;)Ib(this,a.pop());delete this.aa};var Kb,Lb=(Kb="ScriptEngine"in o&&o.ScriptEngine()=="JScript")?o.ScriptEngineMajorVersion()+"."+o.ScriptEngineMinorVersion()+"."+o.ScriptEngineBuildVersion():"0";var Mb=function(){},Nb=0;n=Mb.prototype;n.key=0;n.$=!1;n.Ab=!1;n.ua=function(a,b,c,d,f,g){q(a)?this.yb=!0:a&&a.handleEvent&&q(a.handleEvent)?this.yb=!1:e(Error("Invalid listener argument"));this.fa=a;this.rb=b;this.src=c;this.type=d;this.capture=!!f;this.Fa=g;this.Ab=!1;this.key=++Nb;this.$=!1};n.handleEvent=function(a){return this.yb?this.fa.call(this.Fa||this.src,a):this.fa.handleEvent.call(this.fa,a)};var Ob,Pb,Qb,Rb,Sb,Tb,Ub,Vb,Wb,Xb,Yb;
+(function(){function a(){return{I:0,F:0}}function b(){return[]}function c(){var a=function(b){b=h.call(a.src,a.key,b);if(!b)return b};return a}function d(){return new Mb}function f(){return new D}var g=Kb&&!(ua(Lb,"5.7")>=0),h;Tb=function(a){h=a};if(g){Ob=function(){return j.getObject()};Pb=function(a){Jb(j,a)};Qb=function(){return k.getObject()};Rb=function(a){Jb(k,a)};Sb=function(){return l.getObject()};Ub=function(){Jb(l,c())};Vb=function(){return M.getObject()};Wb=function(a){Jb(M,a)};Xb=function(){return r.getObject()};
+Yb=function(a){Jb(r,a)};var j=new E(0,600);j.N=a;var k=new E(0,600);k.N=b;var l=new E(0,600);l.N=c;var M=new E(0,600);M.N=d;var r=new E(0,600);r.N=f}else Ob=a,Pb=ba,Qb=b,Rb=ba,Sb=c,Ub=ba,Vb=d,Wb=ba,Xb=f,Yb=ba})();var Zb={},F={},G={},$b={},H=function(a,b,c,d,f){if(b)if(ea(b)){for(var g=0;g<b.length;g++)H(a,b[g],c,d,f);return m}else{var d=!!d,h=F;b in h||(h[b]=Ob());h=h[b];d in h||(h[d]=Ob(),h.I++);var h=h[d],j=s(a),k;h.F++;if(h[j]){k=h[j];for(g=0;g<k.length;g++)if(h=k[g],h.fa==c&&h.Fa==f){if(h.$)break;return k[g].key}}else k=h[j]=Qb(),h.I++;g=Sb();g.src=a;h=Vb();h.ua(c,g,a,b,d,f);c=h.key;g.key=c;k.push(h);Zb[c]=h;G[j]||(G[j]=Qb());G[j].push(h);a.addEventListener?(a==o||!a.qb)&&a.addEventListener(b,g,d):a.attachEvent(b in
+$b?$b[b]:$b[b]="on"+b,g);return c}else e(Error("Invalid event type"))},ac=function(a,b,c,d,f){if(ea(b))for(var g=0;g<b.length;g++)ac(a,b[g],c,d,f);else if(d=!!d,a=bc(a,b,d))for(g=0;g<a.length;g++)if(a[g].fa==c&&a[g].capture==d&&a[g].Fa==f){I(a[g].key);break}},I=function(a){if(!Zb[a])return!1;var b=Zb[a];if(b.$)return!1;var c=b.src,d=b.type,f=b.rb,g=b.capture;c.removeEventListener?(c==o||!c.qb)&&c.removeEventListener(d,f,g):c.detachEvent&&c.detachEvent(d in $b?$b[d]:$b[d]="on"+d,f);c=s(c);f=F[d][g][c];
+if(G[c]){var h=G[c];Aa(h,b);h.length==0&&delete G[c]}b.$=!0;f.wb=!0;cc(d,g,c,f);delete Zb[a];return!0},cc=function(a,b,c,d){if(!d.La&&d.wb){for(var f=0,g=0;f<d.length;f++)if(d[f].$){var h=d[f].rb;h.src=m;Ub(h);Wb(d[f])}else f!=g&&(d[g]=d[f]),g++;d.length=g;d.wb=!1;g==0&&(Rb(d),delete F[a][b][c],F[a][b].I--,F[a][b].I==0&&(Pb(F[a][b]),delete F[a][b],F[a].I--),F[a].I==0&&(Pb(F[a]),delete F[a]))}},dc=function(a){var b,c=0,d=b==m;b=!!b;if(a==m)Fa(G,function(a){for(var f=a.length-1;f>=0;f--){var g=a[f];
+if(d||b==g.capture)I(g.key),c++}});else if(a=s(a),G[a])for(var a=G[a],f=a.length-1;f>=0;f--){var g=a[f];if(d||b==g.capture)I(g.key),c++}},bc=function(a,b,c){var d=F;return b in d&&(d=d[b],c in d&&(d=d[c],a=s(a),d[a]))?d[a]:m},fc=function(a,b,c,d,f){var g=1,b=s(b);if(a[b]){a.F--;a=a[b];a.La?a.La++:a.La=1;try{for(var h=a.length,j=0;j<h;j++){var k=a[j];k&&!k.$&&(g&=ec(k,f)!==!1)}}finally{a.La--,cc(c,d,b,a)}}return Boolean(g)},ec=function(a,b){var c=a.handleEvent(b);a.Ab&&I(a.key);return c};
+Tb(function(a,b){if(!Zb[a])return!0;var c=Zb[a],d=c.type,f=F;if(!(d in f))return!0;var f=f[d],g,h;Cb===i&&(Cb=w&&!o.addEventListener);if(Cb){var j;if(!(j=b))a:{j="window.event".split(".");for(var k=o;g=j.shift();)if(k[g]!=m)k=k[g];else{j=m;break a}j=k}g=j;j=!0 in f;k=!1 in f;if(j){if(g.keyCode<0||g.returnValue!=i)return!0;a:{var l=!1;if(g.keyCode==0)try{g.keyCode=-1;break a}catch(M){l=!0}if(l||g.returnValue==i)g.returnValue=!0}}l=Xb();l.ua(g,this);g=!0;try{if(j){for(var r=Qb(),J=l.currentTarget;J;J=
+J.parentNode)r.push(J);h=f[!0];h.F=h.I;for(var P=r.length-1;!l.ba&&P>=0&&h.F;P--)l.currentTarget=r[P],g&=fc(h,r[P],d,!0,l);if(k){h=f[!1];h.F=h.I;for(P=0;!l.ba&&P<r.length&&h.F;P++)l.currentTarget=r[P],g&=fc(h,r[P],d,!1,l)}}else g=ec(c,l)}finally{if(r)r.length=0,Rb(r);l.z();Yb(l)}return g}d=new D(b,this);try{g=ec(c,d)}finally{d.z()}return g});var gc=function(a){this.Bb=a;this.Ma=[]};t(gc,B);var hc=[],K=function(a,b,c,d){ea(c)||(hc[0]=c,c=hc);for(var f=0;f<c.length;f++)a.Ma.push(H(b,c[f],d||a,!1,a.Bb||a));return a},L=function(a,b,c,d,f,g){if(ea(c))for(var h=0;h<c.length;h++)L(a,b,c[h],d,f,g);else{a:{d=d||a;g=g||a.Bb||a;f=!!f;if(b=bc(b,c,f))for(c=0;c<b.length;c++)if(!b[c].$&&b[c].fa==d&&b[c].capture==f&&b[c].Fa==g){b=b[c];break a}b=m}if(b)b=b.key,I(b),Aa(a.Ma,b)}return a},ic=function(a){xa(a.Ma,I);a.Ma.length=0};
+gc.prototype.f=function(){gc.c.f.call(this);ic(this)};gc.prototype.handleEvent=function(){e(Error("EventHandler.handleEvent not implemented"))};var jc=function(){};t(jc,B);n=jc.prototype;n.qb=!0;n.Ea=m;n.bb=function(a){this.Ea=a};n.addEventListener=function(a,b,c,d){H(this,a,b,c,d)};n.removeEventListener=function(a,b,c,d){ac(this,a,b,c,d)};
+n.dispatchEvent=function(a){var b=a.type||a,c=F;if(b in c){if(p(a))a=new C(a,this);else if(a instanceof C)a.target=a.target||this;else{var d=a,a=new C(b,this);Ja(a,d)}var d=1,f,c=c[b],b=!0 in c,g;if(b){f=[];for(g=this;g;g=g.Ea)f.push(g);g=c[!0];g.F=g.I;for(var h=f.length-1;!a.ba&&h>=0&&g.F;h--)a.currentTarget=f[h],d&=fc(g,f[h],a.type,!0,a)&&a.va!=!1}if(!1 in c)if(g=c[!1],g.F=g.I,b)for(h=0;!a.ba&&h<f.length&&g.F;h++)a.currentTarget=f[h],d&=fc(g,f[h],a.type,!1,a)&&a.va!=!1;else for(f=this;!a.ba&&f&&
+g.F;f=f.Ea)a.currentTarget=f,d&=fc(g,f,a.type,!1,a)&&a.va!=!1;a=Boolean(d)}else a=!0;return a};n.f=function(){jc.c.f.call(this);dc(this);this.Ea=m};var N=function(a,b){a.style.display=b?"":"none"},kc=x?"MozUserSelect":y?"WebkitUserSelect":m,lc=function(a,b,c){c=!c?a.getElementsByTagName("*"):m;if(kc){if(b=b?"none":"",a.style[kc]=b,c)for(var a=0,d;d=c[a];a++)d.style[kc]=b}else if(w||Ra)if(b=b?"on":"",a.setAttribute("unselectable",b),c)for(a=0;d=c[a];a++)d.setAttribute("unselectable",b)};var mc=function(){};ca(mc);mc.prototype.Zb=0;mc.Q();var O=function(a){this.r=a||lb();this.sa=nc};t(O,jc);O.prototype.Yb=mc.Q();var nc=m,oc=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close"}e(Error("Invalid component state"))};n=O.prototype;n.ga=m;n.e=!1;n.d=m;n.sa=m;n.Lb=m;n.p=m;n.q=m;n.t=m;n.lb=!1;
+var pc=function(a){return a.ga||(a.ga=":"+(a.Yb.Zb++).toString(36))},qc=function(a,b){if(a.p&&a.p.t){var c=a.p.t,d=a.ga;d in c&&delete c[d];Ga(a.p.t,b,a)}a.ga=b};O.prototype.a=function(){return this.d};var rc=function(a){return a.da||(a.da=new gc(a))},tc=function(a,b){a==b&&e(Error("Unable to set parent component"));b&&a.p&&a.ga&&sc(a.p,a.ga)&&a.p!=b&&e(Error("Unable to set parent component"));a.p=b;O.c.bb.call(a,b)};n=O.prototype;n.getParent=function(){return this.p};
+n.bb=function(a){this.p&&this.p!=a&&e(Error("Method not supported"));O.c.bb.call(this,a)};n.Ka=function(){return this.r};n.l=function(){this.d=this.r.createElement("div")};n.K=function(a){if(this.e)e(Error("Component already rendered"));else if(a&&this.Z(a)){this.lb=!0;if(!this.r||this.r.H!=kb(a))this.r=lb(a);this.Ua(a);this.s()}else e(Error("Invalid element to decorate"))};n.Z=function(){return!0};n.Ua=function(a){this.d=a};n.s=function(){this.e=!0;uc(this,function(a){!a.e&&a.a()&&a.s()})};
+n.V=function(){uc(this,function(a){a.e&&a.V()});this.da&&ic(this.da);this.e=!1};n.f=function(){O.c.f.call(this);this.e&&this.V();this.da&&(this.da.z(),delete this.da);uc(this,function(a){a.z()});!this.lb&&this.d&&tb(this.d);this.p=this.Lb=this.d=this.t=this.q=m};n.Da=function(a,b){this.Ra(a,vc(this),b)};
+n.Ra=function(a,b,c){a.e&&(c||!this.e)&&e(Error("Component already rendered"));(b<0||b>vc(this))&&e(Error("Child component index out of bounds"));if(!this.t||!this.q)this.t={},this.q=[];a.getParent()==this?(this.t[pc(a)]=a,Aa(this.q,a)):Ga(this.t,pc(a),a);tc(a,this);Ea(this.q,b,0,a);a.e&&this.e&&a.getParent()==this?(c=this.B(),c.insertBefore(a.a(),c.childNodes[b]||m)):c?(this.d||this.l(),c=Q(this,b+1),b=this.B(),c=c?c.d:m,a.e&&e(Error("Component already rendered")),a.d||a.l(),b?b.insertBefore(a.d,
+c||m):a.r.H.body.appendChild(a.d),(!a.p||a.p.e)&&a.s()):this.e&&!a.e&&a.d&&a.s()};n.B=function(){return this.d};var wc=function(a){if(a.sa==m){var b;a:{b=a.e?a.d:a.r.H.body;var c=kb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(b=c.defaultView.getComputedStyle(b,m))){b=b.direction||b.getPropertyValue("direction");break a}b=""}a.sa="rtl"==(b||((a.e?a.d:a.r.H.body).currentStyle?(a.e?a.d:a.r.H.body).currentStyle.direction:m)||(a.e?a.d:a.r.H.body).style.direction)}return a.sa};
+O.prototype.pa=function(a){this.e&&e(Error("Component already rendered"));this.sa=a};var vc=function(a){return a.q?a.q.length:0},sc=function(a,b){return a.t&&b?(b in a.t?a.t[b]:i)||m:m},Q=function(a,b){return a.q?a.q[b]||m:m},uc=function(a,b,c){a.q&&xa(a.q,b,c)},xc=function(a,b){return a.q&&b?wa(a.q,b):-1};
+O.prototype.removeChild=function(a,b){if(a){var c=p(a)?a:pc(a),a=sc(this,c);if(c&&a){var d=this.t;c in d&&delete d[c];Aa(this.q,a);b&&(a.V(),a.d&&tb(a.d));tc(a,m)}}a||e(Error("Child is not in parent component"));return a};var yc=function(a,b){a.setAttribute("role",b);a.mc=b};var Ac=function(a,b,c,d,f){if(!w&&(!y||!z("525")))return!0;if(Ta&&f)return zc(a);if(f&&!d)return!1;if(!c&&(b==17||b==18))return!1;if(w&&d&&b==a)return!1;switch(a){case 13:return!(w&&db());case 27:return!y}return zc(a)},zc=function(a){if(a>=48&&a<=57)return!0;if(a>=96&&a<=106)return!0;if(a>=65&&a<=90)return!0;if(y&&a==0)return!0;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return!0;default:return!1}};var R=function(a,b){a&&Bc(this,a,b)};t(R,jc);n=R.prototype;n.d=m;n.Ia=m;n.Wa=m;n.Ja=m;n.S=-1;n.R=-1;
+var Cc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},Dc={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},Ec={61:187,59:186},Fc=w||y&&z("525");
+R.prototype.Qb=function(a){if(y&&(this.S==17&&!a.ctrlKey||this.S==18&&!a.altKey))this.R=this.S=-1;Fc&&!Ac(a.keyCode,this.S,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):this.R=x&&a.keyCode in Ec?Ec[a.keyCode]:a.keyCode};R.prototype.Rb=function(){this.R=this.S=-1};
+R.prototype.handleEvent=function(a){var b=a.M,c,d;w&&a.type=="keypress"?(c=this.R,d=c!=13&&c!=27?b.keyCode:0):y&&a.type=="keypress"?(c=this.R,d=b.charCode>=0&&b.charCode<63232&&zc(c)?b.charCode:0):Ra?(c=this.R,d=zc(c)?b.keyCode:0):(c=b.keyCode||this.R,d=b.charCode||0,Ta&&d==63&&!c&&(c=191));var f=c,g=b.keyIdentifier;c?c>=63232&&c in Cc?f=Cc[c]:c==25&&a.shiftKey&&(f=9):g&&g in Dc&&(f=Dc[g]);a=f==this.S;this.S=f;b=new Gc(f,d,a,b);try{this.dispatchEvent(b)}finally{b.z()}};R.prototype.a=function(){return this.d};
+var Bc=function(a,b,c){a.Ja&&a.detach();a.d=b;a.Ia=H(a.d,"keypress",a,c);a.Wa=H(a.d,"keydown",a.Qb,c,a);a.Ja=H(a.d,"keyup",a.Rb,c,a)};R.prototype.detach=function(){if(this.Ia)I(this.Ia),I(this.Wa),I(this.Ja),this.Ja=this.Wa=this.Ia=m;this.d=m;this.R=this.S=-1};R.prototype.f=function(){R.c.f.call(this);this.detach()};var Gc=function(a,b,c,d){d&&this.ua(d,i);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};t(Gc,D);var Ic=function(a,b){a||e(Error("Invalid class name "+a));q(b)||e(Error("Invalid decorator function "+b));Hc[a]=b},Jc={},Hc={};var S=function(){},Kc;ca(S);n=S.prototype;n.ea=function(){};n.l=function(a){var b=a.Ka().l("div",this.ta(a).join(" "),a.xa);Lc(a,b);return b};n.B=function(a){return a};n.ra=function(a,b,c){if(a=a.a?a.a():a)if(w&&!z("7")){var d=Mc(hb(a),b);d.push(b);ja(c?A:ib,a).apply(m,d)}else c?A(a,b):ib(a,b)};n.Z=function(){return!0};
+n.K=function(a,b){b.id&&qc(a,b.id);var c=this.B(b);a.xa=c&&c.firstChild?c.firstChild.nextSibling?Ca(c.childNodes):c.firstChild:m;var d=0,f=this.m(),g=this.m(),h=!1,j=!1,c=!1,k=hb(b);xa(k,function(a){if(!h&&a==f)h=!0,g==f&&(j=!0);else if(!j&&a==g)j=!0;else{var b=d;if(!this.ob)this.Ga||Nc(this),this.ob=Ha(this.Ga);a=parseInt(this.ob[a],10);d=b|(isNaN(a)?0:a)}},this);a.g=d;h||(k.push(f),g==f&&(j=!0));j||k.push(g);var l=a.C;l&&k.push.apply(k,l);if(w&&!z("7")){var M=Mc(k);M.length>0&&(k.push.apply(k,M),
+c=!0)}if(!h||!j||l||c)b.className=k.join(" ");Lc(a,b);return b};n.Na=function(a){wc(a)&&this.pa(a.a(),!0);a.isEnabled()&&this.la(a,a.G())};var Lc=function(a,b){u(a);u(b);a.isEnabled()||Oc(b,1,!0);a.g&8&&Oc(b,8,!0);a.n&16&&Oc(b,16,!!(a.g&16));a.n&64&&Oc(b,64,!!(a.g&64))};n=S.prototype;n.za=function(a,b){lc(a,!b,!w&&!Ra)};n.pa=function(a,b){this.ra(a,this.m()+"-rtl",b)};n.U=function(a){var b;return a.n&32&&(b=a.k())?zb(b):!1};
+n.la=function(a,b){var c;if(a.n&32&&(c=a.k())){if(!b&&a.g&32){try{c.blur()}catch(d){}a.g&32&&a.ma(m)}if(zb(c)!=b)b?c.tabIndex=0:(c.tabIndex=-1,c.removeAttribute("tabIndex"))}};n.ia=function(a,b){N(a,b)};n.v=function(a,b,c){var d=a.a();if(d){var f=Pc(this,b);f&&this.ra(a,f,c);Oc(d,b,c)}};var Oc=function(a,b,c){Kc||(Kc={1:"disabled",8:"selected",16:"checked",64:"expanded"});(b=Kc[b])&&a.setAttribute("aria-"+b,c)};S.prototype.k=function(a){return a.a()};S.prototype.m=function(){return"goog-control"};
+S.prototype.ta=function(a){var b=this.m(),c=[b],d=this.m();d!=b&&c.push(d);b=a.g;for(d=[];b;){var f=b&-b;d.push(Pc(this,f));b&=~f}c.push.apply(c,d);(a=a.C)&&c.push.apply(c,a);w&&!z("7")&&c.push.apply(c,Mc(c));return c};
+var Mc=function(a,b){var c=[];b&&(a=a.concat([b]));xa([],function(d){ya(d,ja(za,a))&&(!b||za(d,b))&&c.push(d.join("_"))});return c},Pc=function(a,b){a.Ga||Nc(a);return a.Ga[b]},Nc=function(a){var b=a.m();a.Ga={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+"-open"}};var T=function(a,b,c){O.call(this,c);if(!b){for(var b=this.constructor,d;b;){d=s(b);if(d=Jc[d])break;b=b.c?b.c.constructor:m}b=d?q(d.Q)?d.Q():new d:m}this.b=b;this.xa=a};t(T,O);n=T.prototype;n.xa=m;n.g=0;n.n=39;n.Xb=255;n.Ba=0;n.o=!0;n.C=m;n.ja=!0;n.ya=!1;n.k=function(){return this.b.k(this)};n.Aa=function(){return this.u||(this.u=new R)};n.vb=function(){return this.b};
+n.ra=function(a,b){if(b){if(a)this.C?za(this.C,a)||this.C.push(a):this.C=[a],this.b.ra(this,a,!0)}else if(a&&this.C){Aa(this.C,a);if(this.C.length==0)this.C=m;this.b.ra(this,a,!1)}};n.l=function(){var a=this.b.l(this);this.d=a;var b=this.b.ea();b&&yc(a,b);this.ya||this.b.za(a,!1);this.G()||this.b.ia(a,!1)};n.B=function(){return this.b.B(this.a())};n.Z=function(a){return this.b.Z(a)};
+n.Ua=function(a){this.d=a=this.b.K(this,a);var b=this.b.ea();b&&yc(a,b);this.ya||this.b.za(a,!1);this.o=a.style.display!="none"};n.s=function(){T.c.s.call(this);this.b.Na(this);if(this.n&-2&&(this.ja&&Qc(this,!0),this.n&32)){var a=this.k();if(a){var b=this.Aa();Bc(b,a);K(K(K(rc(this),b,"key",this.O),a,"focus",this.oa),a,"blur",this.ma)}}};
+var Qc=function(a,b){var c=rc(a),d=a.a();b?(K(K(K(K(c,d,"mouseover",a.Ya),d,"mousedown",a.ka),d,"mouseup",a.Za),d,"mouseout",a.Xa),w&&K(c,d,"dblclick",a.sb)):(L(L(L(L(c,d,"mouseover",a.Ya),d,"mousedown",a.ka),d,"mouseup",a.Za),d,"mouseout",a.Xa),w&&L(c,d,"dblclick",a.sb))};n=T.prototype;n.V=function(){T.c.V.call(this);this.u&&this.u.detach();this.G()&&this.isEnabled()&&this.b.la(this,!1)};n.f=function(){T.c.f.call(this);this.u&&(this.u.z(),delete this.u);delete this.b;this.C=this.xa=m};
+n.pa=function(a){T.c.pa.call(this,a);var b=this.a();b&&this.b.pa(b,a)};n.za=function(a){this.ya=a;var b=this.a();b&&this.b.za(b,a)};n.G=function(){return this.o};n.ia=function(a,b){if(b||this.o!=a&&this.dispatchEvent(a?"show":"hide")){var c=this.a();c&&this.b.ia(c,a);this.isEnabled()&&this.b.la(this,a);this.o=a;return!0}return!1};n.isEnabled=function(){return!(this.g&1)};
+n.qa=function(a){var b=this.getParent();if((!b||typeof b.isEnabled!="function"||b.isEnabled())&&U(this,1,!a))a||(this.setActive(!1),this.A(!1)),this.G()&&this.b.la(this,a),this.v(1,!a)};n.A=function(a){U(this,2,a)&&this.v(2,a)};n.setActive=function(a){U(this,4,a)&&this.v(4,a)};var Rc=function(a,b){U(a,8,b)&&a.v(8,b)},Sc=function(a,b){U(a,64,b)&&a.v(64,b)};T.prototype.v=function(a,b){if(this.n&a&&b!=!!(this.g&a))this.b.v(this,a,b),this.g=b?this.g|a:this.g&~a};
+var Tc=function(a,b,c){a.e&&a.g&b&&!c&&e(Error("Component already rendered"));!c&&a.g&b&&a.v(b,!1);a.n=c?a.n|b:a.n&~b},V=function(a,b){return!!(a.Xb&b)&&!!(a.n&b)},U=function(a,b,c){return!!(a.n&b)&&!!(a.g&b)!=c&&(!(a.Ba&b)||a.dispatchEvent(oc(b,c)))&&!a.ab};n=T.prototype;n.Ya=function(a){(!a.relatedTarget||!vb(this.a(),a.relatedTarget))&&this.dispatchEvent("enter")&&this.isEnabled()&&V(this,2)&&this.A(!0)};
+n.Xa=function(a){if((!a.relatedTarget||!vb(this.a(),a.relatedTarget))&&this.dispatchEvent("leave"))V(this,4)&&this.setActive(!1),V(this,2)&&this.A(!1)};n.ka=function(a){if(this.isEnabled()&&(V(this,2)&&this.A(!0),Hb(a)&&(!y||!Ta||!a.ctrlKey)))V(this,4)&&this.setActive(!0),this.b.U(this)&&this.k().focus();!this.ya&&Hb(a)&&(!y||!Ta||!a.ctrlKey)&&a.preventDefault()};n.Za=function(a){this.isEnabled()&&(V(this,2)&&this.A(!0),this.g&4&&Uc(this,a)&&V(this,4)&&this.setActive(!1))};
+n.sb=function(a){this.isEnabled()&&Uc(this,a)};var Uc=function(a,b){if(V(a,16)){var c=!(a.g&16);U(a,16,c)&&a.v(16,c)}V(a,8)&&Rc(a,!0);V(a,64)&&Sc(a,!(a.g&64));c=new C("action",a);if(b)for(var d=["altKey","ctrlKey","metaKey","shiftKey","platformModifierKey"],f,g=0;f=d[g];g++)c[f]=b[f];return a.dispatchEvent(c)};T.prototype.oa=function(){V(this,32)&&U(this,32,!0)&&this.v(32,!0)};T.prototype.ma=function(){V(this,4)&&this.setActive(!1);V(this,32)&&U(this,32,!1)&&this.v(32,!1)};
+T.prototype.O=function(a){return this.G()&&this.isEnabled()&&this.jb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};T.prototype.jb=function(a){return a.keyCode==13&&Uc(this,a)};q(T)||e(Error("Invalid component class "+T));q(S)||e(Error("Invalid renderer class "+S));var Vc=s(T);Jc[Vc]=S;Ic("goog-control",function(){return new T(m)});var Wc=function(){};t(Wc,S);ca(Wc);Wc.prototype.l=function(a){return a.Ka().l("div",this.m())};Wc.prototype.K=function(a,b){if(b.tagName=="HR"){var c=b,b=this.l(a);c.parentNode&&c.parentNode.insertBefore(b,c);tb(c)}else A(b,this.m());return b};Wc.prototype.m=function(){return"goog-menuseparator"};var Xc=function(a,b){T.call(this,m,a||Wc.Q(),b);Tc(this,1,!1);Tc(this,2,!1);Tc(this,4,!1);Tc(this,32,!1);this.g=1};t(Xc,T);Xc.prototype.s=function(){Xc.c.s.call(this);yc(this.a(),"separator")};Ic("goog-menuseparator",function(){return new Xc});var W=function(){};ca(W);W.prototype.ea=function(){};var Yc=function(a,b){if(a)a.tabIndex=b?0:-1};n=W.prototype;n.l=function(a){return a.Ka().l("div",this.ta(a).join(" "))};n.B=function(a){return a};n.Z=function(a){return a.tagName=="DIV"};n.K=function(a,b){b.id&&qc(a,b.id);var c=this.m(),d=!1,f=hb(b);f&&xa(f,function(b){b==c?d=!0:b&&this.Va(a,b,c)},this);d||A(b,c);Zc(a,this.B(b));return b};
+n.Va=function(a,b,c){b==c+"-disabled"?a.qa(!1):b==c+"-horizontal"?$c(a,"horizontal"):b==c+"-vertical"&&$c(a,"vertical")};var Zc=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(c.nodeType==1){var f;a:{f=i;for(var g=hb(c),h=0,j=g.length;h<j;h++)if(f=g[h]in Hc?Hc[g[h]]():m)break a;f=m}if(f)f.d=c,a.isEnabled()||f.qa(!1),a.Da(f),f.K(c)}else(!c.nodeValue||ma(c.nodeValue)=="")&&b.removeChild(c);c=d}};
+W.prototype.Na=function(a){a=a.a();lc(a,!0,x);if(w)a.hideFocus=!0;var b=this.ea();b&&yc(a,b)};W.prototype.k=function(a){return a.a()};W.prototype.m=function(){return"goog-container"};W.prototype.ta=function(a){var b=this.m(),c=[b,a.P=="horizontal"?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var X=function(a,b,c){O.call(this,c);this.b=b||W.Q();this.P=a||"vertical"};t(X,O);n=X.prototype;n.Oa=m;n.u=m;n.b=m;n.P=m;n.o=!0;n.X=!0;n.$a=!0;n.i=-1;n.h=m;n.ca=!1;n.Pb=!1;n.Mb=!0;n.J=m;n.k=function(){return this.Oa||this.b.k(this)};n.Aa=function(){return this.u||(this.u=new R(this.k()))};n.vb=function(){return this.b};n.l=function(){this.d=this.b.l(this)};n.B=function(){return this.b.B(this.a())};n.Z=function(a){return this.b.Z(a)};
+n.Ua=function(a){this.d=this.b.K(this,a);if(a.style.display=="none")this.o=!1};n.s=function(){X.c.s.call(this);uc(this,function(a){a.e&&ad(this,a)},this);var a=this.a();this.b.Na(this);this.ia(this.o,!0);K(K(K(K(K(K(K(K(rc(this),this,"enter",this.Hb),this,"highlight",this.Ib),this,"unhighlight",this.Kb),this,"open",this.Jb),this,"close",this.Fb),a,"mousedown",this.ka),kb(a),"mouseup",this.Gb),a,["mousedown","mouseup","mouseover","mouseout"],this.Eb);this.U()&&bd(this,!0)};
+var bd=function(a,b){var c=rc(a),d=a.k();b?K(K(K(c,d,"focus",a.oa),d,"blur",a.ma),a.Aa(),"key",a.O):L(L(L(c,d,"focus",a.oa),d,"blur",a.ma),a.Aa(),"key",a.O)};n=X.prototype;n.V=function(){cd(this,-1);this.h&&Sc(this.h,!1);this.ca=!1;X.c.V.call(this)};n.f=function(){X.c.f.call(this);if(this.u)this.u.z(),this.u=m;this.b=this.h=this.J=this.Oa=m};n.Hb=function(){return!0};
+n.Ib=function(a){var b=xc(this,a.target);if(b>-1&&b!=this.i){var c=Q(this,this.i);c&&c.A(!1);this.i=b;c=Q(this,this.i);this.ca&&c.setActive(!0);this.Mb&&this.h&&c!=this.h&&(c.n&64?Sc(c,!0):Sc(this.h,!1))}this.a().setAttribute("aria-activedescendant",a.target.a().id)};n.Kb=function(a){if(a.target==Q(this,this.i))this.i=-1;this.a().setAttribute("aria-activedescendant","")};n.Jb=function(a){if((a=a.target)&&a!=this.h&&a.getParent()==this)this.h&&Sc(this.h,!1),this.h=a};
+n.Fb=function(a){if(a.target==this.h)this.h=m};n.ka=function(a){if(this.X)this.ca=!0;var b=this.k();b&&zb(b)?b.focus():a.preventDefault()};n.Gb=function(){this.ca=!1};n.Eb=function(a){var b;a:{b=a.target;if(this.J)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.J){b=this.J[d];break a}b=b.parentNode}b=m}if(b)switch(a.type){case "mousedown":b.ka(a);break;case "mouseup":b.Za(a);break;case "mouseover":b.Ya(a);break;case "mouseout":b.Xa(a)}};n.oa=function(){};
+n.ma=function(){cd(this,-1);this.ca=!1;this.h&&Sc(this.h,!1)};n.O=function(a){return this.isEnabled()&&this.G()&&(vc(this)!=0||this.Oa)&&this.jb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};
+n.jb=function(a){var b=Q(this,this.i);if(b&&typeof b.O=="function"&&b.O(a))return!0;if(this.h&&this.h!=b&&typeof this.h.O=="function"&&this.h.O(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.U())this.k().blur();else return!1;break;case 36:dd(this);break;case 35:ed(this);break;case 38:if(this.P=="vertical")fd(this);else return!1;break;case 37:if(this.P=="horizontal")wc(this)?gd(this):fd(this);else return!1;break;case 40:if(this.P=="vertical")gd(this);
+else return!1;break;case 39:if(this.P=="horizontal")wc(this)?fd(this):gd(this);else return!1;break;default:return!1}return!0};var ad=function(a,b){var c=b.a(),c=c.id||(c.id=pc(b));if(!a.J)a.J={};a.J[c]=b};X.prototype.Da=function(a,b){X.c.Da.call(this,a,b)};X.prototype.Ra=function(a,b,c){a.Ba|=2;a.Ba|=64;(this.U()||!this.Pb)&&Tc(a,32,!1);a.e&&!1!=a.ja&&Qc(a,!1);a.ja=!1;X.c.Ra.call(this,a,b,c);c&&this.e&&ad(this,a);b<=this.i&&this.i++};
+X.prototype.removeChild=function(a,b){if(a=p(a)?sc(this,a):a){var c=xc(this,a);c!=-1&&(c==this.i?a.A(!1):c<this.i&&this.i--);var d=a.a();if(d&&d.id&&this.J)c=this.J,d=d.id,d in c&&delete c[d]}c=a=X.c.removeChild.call(this,a,b);c.e&&!0!=c.ja&&Qc(c,!0);c.ja=!0;return a};var $c=function(a,b){a.a()&&e(Error("Component already rendered"));a.P=b};n=X.prototype;n.G=function(){return this.o};
+n.ia=function(a,b){if(b||this.o!=a&&this.dispatchEvent(a?"show":"hide")){this.o=a;var c=this.a();c&&(N(c,a),this.U()&&Yc(this.k(),this.X&&this.o),b||this.dispatchEvent(this.o?"aftershow":"afterhide"));return!0}return!1};n.isEnabled=function(){return this.X};n.qa=function(a){if(this.X!=a&&this.dispatchEvent(a?"enable":"disable"))a?(this.X=!0,uc(this,function(a){a.pb?delete a.pb:a.qa(!0)})):(uc(this,function(a){a.isEnabled()?a.qa(!1):a.pb=!0}),this.ca=this.X=!1),this.U()&&Yc(this.k(),a&&this.o)};
+n.U=function(){return this.$a};n.la=function(a){a!=this.$a&&this.e&&bd(this,a);this.$a=a;this.X&&this.o&&Yc(this.k(),a)};var cd=function(a,b){var c=Q(a,b);c?c.A(!0):a.i>-1&&Q(a,a.i).A(!1)};X.prototype.A=function(a){cd(this,xc(this,a))};
+var dd=function(a){hd(a,function(a,c){return(a+1)%c},vc(a)-1)},ed=function(a){hd(a,function(a,c){a--;return a<0?c-1:a},0)},gd=function(a){hd(a,function(a,c){return(a+1)%c},a.i)},fd=function(a){hd(a,function(a,c){a--;return a<0?c-1:a},a.i)},hd=function(a,b,c){for(var c=c<0?xc(a,a.h):c,d=vc(a),c=b.call(a,c,d),f=0;f<=d;){var g=Q(a,c);if(g&&g.G()&&g.isEnabled()&&g.n&2){a.Ta(c);break}f++;c=b.call(a,c,d)}};X.prototype.Ta=function(a){cd(this,a)};var id=function(){};t(id,S);ca(id);n=id.prototype;n.m=function(){return"goog-tab"};n.ea=function(){return"tab"};n.l=function(a){var b=id.c.l.call(this,a);(a=a.Pa())&&this.Sa(b,a);return b};n.K=function(a,b){var b=id.c.K.call(this,a,b),c=this.Pa(b);if(c)a.mb=c;if(a.g&8&&(c=a.getParent())&&q(c.W))a.v(8,!1),c.W(a);return b};n.Pa=function(a){return a.title||""};n.Sa=function(a,b){if(a)a.title=b||""};var jd=function(a,b,c){T.call(this,a,b||id.Q(),c);Tc(this,8,!0);this.Ba|=9};t(jd,T);jd.prototype.Pa=function(){return this.mb};jd.prototype.Sa=function(a){this.vb().Sa(this.a(),a);this.mb=a};Ic("goog-tab",function(){return new jd(m)});var Y=function(){};t(Y,W);ca(Y);Y.prototype.m=function(){return"goog-tab-bar"};Y.prototype.ea=function(){return"tablist"};Y.prototype.Va=function(a,b,c){if(!this.ub)this.Ha||kd(this),this.ub=Ha(this.Ha);var d=this.ub[b];d?($c(a,ld(d)),a.nb=d):Y.c.Va.call(this,a,b,c)};Y.prototype.ta=function(a){var b=Y.c.ta.call(this,a);this.Ha||kd(this);b.push(this.Ha[a.nb]);return b};var kd=function(a){var b=a.m();a.Ha={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var Z=function(a,b,c){a=a||"top";$c(this,ld(a));this.nb=a;X.call(this,this.P,b||Y.Q(),c);md(this)};t(Z,X);n=Z.prototype;n.Sb=!0;n.D=m;n.s=function(){Z.c.s.call(this);md(this)};n.f=function(){Z.c.f.call(this);this.D=m};n.removeChild=function(a,b){nd(this,a);return Z.c.removeChild.call(this,a,b)};n.Ta=function(a){Z.c.Ta.call(this,a);this.Sb&&this.W(Q(this,a))};n.W=function(a){a?Rc(a,!0):this.D&&Rc(this.D,!1)};
+var nd=function(a,b){if(b&&b==a.D){for(var c=xc(a,b),d=c-1;b=Q(a,d);d--)if(b.G()&&b.isEnabled()){a.W(b);return}for(c+=1;b=Q(a,c);c++)if(b.G()&&b.isEnabled()){a.W(b);return}a.W(m)}};n=Z.prototype;n.cc=function(a){this.D&&this.D!=a.target&&Rc(this.D,!1);this.D=a.target};n.dc=function(a){if(a.target==this.D)this.D=m};n.ac=function(a){nd(this,a.target)};n.bc=function(a){nd(this,a.target)};n.oa=function(){Q(this,this.i)||this.A(this.D||Q(this,0))};
+var md=function(a){K(K(K(K(rc(a),a,"select",a.cc),a,"unselect",a.dc),a,"disable",a.ac),a,"hide",a.bc)},ld=function(a){return a=="start"||a=="end"?"vertical":"horizontal"};Ic("goog-tab-bar",function(){return new Z});var od=function(a,b,c,d,f){function g(a){if(a)a.tabIndex=0,H(a,"click",h.Nb,!1,h),H(a,"keydown",h.Ob,!1,h)}this.r=f||lb();this.L=this.r.a(a)||m;this.na=this.r.a(d||m);this.Ca=(this.Qa=q(b)?b:m)||!b?m:this.r.a(b);this.j=c==!0;var h=this;g(this.L);g(this.na);this.Y(this.j)};t(od,jc);n=od.prototype;n.f=function(){this.L&&dc(this.L);this.na&&dc(this.na);od.c.f.call(this)};n.B=function(){return this.Ca};n.toggle=function(){this.Y(!this.j)};
+n.Y=function(a){if(this.Ca)N(this.Ca,a);else if(a&&this.Qa)this.Ca=this.Qa();if(this.na)N(this.L,!a),N(this.na,a);else{if(this.L){var b=this.L;a?A(b,"goog-zippy-expanded"):ib(b,"goog-zippy-expanded");b=this.L;!a?A(b,"goog-zippy-collapsed"):ib(b,"goog-zippy-collapsed")}this.L&&this.L.setAttribute("aria-expanded",a)}this.j=a;this.dispatchEvent(new pd("toggle",this,this.j))};n.Ob=function(a){if(a.keyCode==13||a.keyCode==32)this.toggle(),a.preventDefault(),a.stopPropagation()};n.Nb=function(){this.toggle()};
+var pd=function(a,b,c){C.call(this,a,b);this.kc=c};t(pd,C);var rd=function(a,b){this.kb=[];for(var c=mb(a),c=nb("span","ae-zippy",c),d=0,f;f=c[d];d++)this.kb.push(new od(f,f.parentNode.parentNode.parentNode.nextElementSibling!=i?f.parentNode.parentNode.parentNode.nextElementSibling:ub(f.parentNode.parentNode.parentNode.nextSibling),!1));this.ec=new qd(this.kb,mb(b))};rd.prototype.ic=function(){return this.ec};rd.prototype.jc=function(){return this.kb};
+var qd=function(a,b){this.wa=a;if(this.wa.length)for(var c=0,d;d=this.wa[c];c++)H(d,"toggle",this.Ub,!1,this);this.ib=0;this.j=!1;c="ae-toggle ae-plus ae-action";this.wa.length||(c+=" ae-disabled");this.T=rb("span",{className:c},"Expand All");H(this.T,"click",this.Tb,!1,this);b&&b.appendChild(this.T)};qd.prototype.Tb=function(){this.wa.length&&this.Y(!this.j)};
+qd.prototype.Ub=function(a){a=a.currentTarget;a.j?this.ib+=1:this.ib-=1;if(a.j!=this.j)if(a.j)this.j=!0,sd(this,!0);else if(this.ib==0)this.j=!1,sd(this,!1)};qd.prototype.Y=function(a){this.j=a;for(var a=0,b;b=this.wa[a];a++)b.j!=this.j&&b.Y(this.j);sd(this)};
+var sd=function(a,b){(b!==i?b:a.j)?(ib(a.T,"ae-plus"),A(a.T,"ae-minus"),wb(a.T,"Collapse All")):(ib(a.T,"ae-minus"),A(a.T,"ae-plus"),wb(a.T,"Expand All"))},td=function(a){this.Vb=a;this.zb={};var b,c=rb("div",{},b=rb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),rb("div",{className:"goog-tab-bar-clear"}),a=rb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new Z;d.K(b);H(d,"select",this.xb,!1,this);H(d,"unselect",this.xb,!1,this);b=0;for(var f;f=
+this.Vb[b];b++)if(f=mb("ae-stats-details-"+f)){var g=nb("h2",m,f)[0],h;h=g;var j=i;gb&&"innerText"in h?j=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(j=[],Ab(h,j,!0),j=j.join(""));j=j.replace(/ \xAD /g," ").replace(/\xAD/g,"");j=j.replace(/\u200B/g,"");gb||(j=j.replace(/ +/g," "));j!=" "&&(j=j.replace(/^\s*/,""));h=j;tb(g);g=new jd(h);this.zb[s(g)]=f;d.Da(g,!0);a.appendChild(f);b==0?d.W(g):N(f,!1)}mb("bd").appendChild(c)};td.prototype.xb=function(a){var b=this.zb[s(a.target)];N(b,a.type=="select")};
+aa("ae.Stats.Details.Tabs",td);aa("goog.ui.Zippy",od);od.prototype.setExpanded=od.prototype.Y;aa("ae.Stats.MakeZippys",rd);rd.prototype.getExpandCollapse=rd.prototype.ic;rd.prototype.getZippys=rd.prototype.jc;qd.prototype.setExpanded=qd.prototype.Y;var $=function(){this.cb=[];this.hb=[]},ud=[[5,0.2,1],[6,0.2,1.2],[5,0.25,1.25],[6,0.25,1.5],[4,0.5,2],[5,0.5,2.5],[6,0.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],vd=function(a){if(a<=0)return[2,0.5,1];for(var b=1;a<1;)a*=10,b/=10;for(;a>=10;)a/=10,b*=10;for(var c=0;c<ud.length;c++)if(a<=ud[c][2])return[ud[c][0],ud[c][1]*b,ud[c][2]*b];return[5,2*b,10*b]};$.prototype.gb="stats/static/pix.gif";$.prototype.w="ae-stats-gantt-";$.prototype.fb=0;$.prototype.write=function(a){this.hb.push(a)};
+var wd=function(a,b,c,d){a.write('<tr class="'+a.w+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.w+'axis">');for(var f=0;f<=b;f++)a.write('<img class="'+a.w+'tick" src="'+a.gb+'" alt="" '),a.write('style="left:'+f*c*d+'%"\n>'),a.write('<span class="'+a.w+'scale" style="left:'+f*c*d+'%">'),a.write(" "+f*c+"</span>");a.write("</div></td></tr>\n")};
+$.prototype.hc=function(){this.hb=[];var a=vd(this.fb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.w+'table">\n');wd(this,b,c,a);for(var d=0;d<this.cb.length;d++){var f=this.cb[d];this.write('<tr class="'+this.w+'datarow"><td width="20%">');f.label.length>0&&(f.ha.length>0&&this.write('<a class="'+this.w+'link" href="'+f.ha+'">'),this.write(f.label),f.ha.length>0&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.w+'container">');f.ha.length>0&&this.write('<a class="'+
this.w+'link" href="'+f.ha+'"\n>');this.write('<img class="'+this.w+'bar" src="'+this.gb+'" alt="" ');this.write('style="left:'+f.start*a+"%;width:"+f.duration*a+'%;min-width:1px"\n>');f.eb>0&&(this.write('<img class="'+this.w+'extra" src="'+this.gb+'" alt="" '),this.write('style="left:'+f.start*a+"%;width:"+f.eb*a+'%"\n>'));f.tb.length>0&&(this.write('<span class="'+this.w+'inline" style="left:'+(f.start+Math.max(f.duration,f.eb))*a+'%"> '),this.write(f.tb),this.write("</span>"));f.ha.length>
-0&&this.write("</a>");this.write("</div></td></tr>\n")}ud(this,b,c,a);this.write("</table>\n");return this.hb.join("")};$.prototype.fc=function(a,b,c,d,f,g){this.fb=Math.max(this.fb,Math.max(b+c,b+d));this.cb.push({label:a,start:b,duration:c,eb:d,tb:f,ha:g})};aa("Gantt",$);$.prototype.add_bar=$.prototype.fc;$.prototype.draw=$.prototype.hc;})();
+0&&this.write("</a>");this.write("</div></td></tr>\n")}wd(this,b,c,a);this.write("</table>\n");return this.hb.join("")};$.prototype.fc=function(a,b,c,d,f,g){this.fb=Math.max(this.fb,Math.max(b+c,b+d));this.cb.push({label:a,start:b,duration:c,eb:d,tb:f,ha:g})};aa("Gantt",$);$.prototype.add_bar=$.prototype.fc;$.prototype.draw=$.prototype.hc;})();
diff --git a/google/appengine/ext/appstats/ui.py b/google/appengine/ext/appstats/ui.py
index c74e6a1..65d42e2 100755
--- a/google/appengine/ext/appstats/ui.py
+++ b/google/appengine/ext/appstats/ui.py
@@ -43,15 +43,12 @@
from google.appengine.api import users
from google.appengine.ext import webapp
+from google.appengine.ext.webapp import _template
from google.appengine.ext.webapp import util
from google.appengine.ext.appstats import recording
-
-
DEBUG = recording.config.DEBUG
-from google.appengine.ext.webapp import template
-import django
def render(tmplname, data):
@@ -60,7 +57,7 @@
tmpl = os.path.join(here, 'templates', tmplname)
data['env'] = os.environ
try:
- return template.render(tmpl, data)
+ return _template.render(tmpl, data)
except Exception, err:
logging.exception('Failed to render %s', tmpl)
return 'Problematic template %s: %s' % (tmplname, err)
@@ -291,15 +288,6 @@
-if django.VERSION[:2] < (0, 97):
- from django.template import defaultfilters
- def safe(text, dummy=None):
- return text
- defaultfilters.register.filter("safe", safe)
-
-
-
-
URLMAP = [
('.*/details', DetailsHandler),
('.*/file', FileHandler),
@@ -308,19 +296,30 @@
]
+class AuthCheckMiddleware(object):
+ """Middleware which conducts an auth check."""
+
+ def __init__(self, application):
+ self._application = application
+
+ def __call__(self, environ, start_response):
+ if not environ.get('SERVER_SOFTWARE', '').startswith('Dev'):
+ if not users.is_current_user_admin():
+ if users.get_current_user() is None:
+ start_response('302 Found',
+ [('Location',
+ users.create_login_url(os.getenv('PATH_INFO', '')))])
+ return []
+ else:
+ start_response('403 Forbidden', [])
+ return ['Forbidden\n']
+ return self._application(environ, start_response)
+
+app = AuthCheckMiddleware(webapp.WSGIApplication(URLMAP, debug=DEBUG))
+
+
def main():
- """Main program. Auth check, then create and run the WSGIApplication."""
- if not os.getenv('SERVER_SOFTWARE', '').startswith('Dev'):
- if not users.is_current_user_admin():
- if users.get_current_user() is None:
- print 'Status: 302'
- print 'Location:', users.create_login_url(os.getenv('PATH_INFO', ''))
- else:
- print 'Status: 403'
- print
- print 'Forbidden'
- return
- app = webapp.WSGIApplication(URLMAP, debug=DEBUG)
+ """Main program. Run the auth checking middleware wrapped WSGIApplication."""
util.run_bare_wsgi_app(app)
diff --git a/google/appengine/ext/blobstore/blobstore.py b/google/appengine/ext/blobstore/blobstore.py
index 17d8d1a..5ee14d9 100755
--- a/google/appengine/ext/blobstore/blobstore.py
+++ b/google/appengine/ext/blobstore/blobstore.py
@@ -31,6 +31,7 @@
+import base64
import cgi
import email
import os
@@ -138,6 +139,7 @@
creation: Creation date of blob, when it was uploaded.
filename: Filename user selected from their machine.
size: Size of uncompressed blob.
+ md5_hash: The md5 hash value of the uploaded blob.
All properties are read-only. Attempting to assign a value to a property
will raise NotImplementedError.
@@ -146,7 +148,8 @@
_unindexed_properties = frozenset()
- _all_properties = frozenset(['content_type', 'creation', 'filename', 'size'])
+ _all_properties = frozenset(['content_type', 'creation', 'filename',
+ 'size', 'md5_hash'])
@property
def content_type(self):
@@ -403,6 +406,8 @@
content_type = get_value(upload_content, 'content-type')
size = get_value(upload_content, 'content-length')
creation_string = get_value(upload_content, UPLOAD_INFO_CREATION_HEADER)
+ md5_hash_encoded = get_value(upload_content, 'content-md5')
+ md5_hash = base64.urlsafe_b64decode(md5_hash_encoded)
try:
size = int(size)
@@ -420,6 +425,7 @@
'creation': creation,
'filename': filename,
'size': size,
+ 'md5_hash': md5_hash,
})
diff --git a/google/appengine/ext/builtins/__init__.py b/google/appengine/ext/builtins/__init__.py
index 4b38287..c428bf0 100755
--- a/google/appengine/ext/builtins/__init__.py
+++ b/google/appengine/ext/builtins/__init__.py
@@ -36,6 +36,7 @@
+
import logging
import os
@@ -49,7 +50,8 @@
_available_builtins = None
-INCLUDE_FILENAME = 'include.yaml'
+INCLUDE_FILENAME_TEMPLATE = 'include-%s.yaml'
+DEFAULT_INCLUDE_FILENAME = 'include.yaml'
class InvalidBuiltinName(Exception):
@@ -76,21 +78,26 @@
AVAILABLE_BUILTINS.
"""
for filename in os.listdir(_handler_dir):
- if os.path.isfile(_get_yaml_path(filename)):
+ if os.path.isfile(_get_yaml_path(filename, '')):
_available_builtins.append(filename)
-def _get_yaml_path(builtin_name):
+def _get_yaml_path(builtin_name, runtime):
"""Return expected path to a builtin handler's yaml file without error check.
"""
- return os.path.join(_handler_dir, builtin_name, INCLUDE_FILENAME)
+ runtime_specific = os.path.join(_handler_dir, builtin_name,
+ INCLUDE_FILENAME_TEMPLATE % runtime)
+ if runtime and os.path.exists(runtime_specific):
+ return runtime_specific
+ return os.path.join(_handler_dir, builtin_name, DEFAULT_INCLUDE_FILENAME)
-def get_yaml_path(builtin_name):
+def get_yaml_path(builtin_name, runtime=''):
"""Returns the full path to a yaml file by giving the builtin module's name.
Args:
builtin_name: single word name of builtin handler
+ runtime: name of the runtime
Raises:
ValueError: if handler does not exist in expected directory
@@ -105,7 +112,7 @@
raise InvalidBuiltinName('%s is not the name of a valid builtin handler.\n'
'Available handlers are: %s' % (
builtin_name, ', '.join(_available_builtins)))
- return _get_yaml_path(builtin_name)
+ return _get_yaml_path(builtin_name, runtime)
diff --git a/google/appengine/ext/builtins/admin_redirect/include-python27.yaml b/google/appengine/ext/builtins/admin_redirect/include-python27.yaml
new file mode 100644
index 0000000..f40f1e3
--- /dev/null
+++ b/google/appengine/ext/builtins/admin_redirect/include-python27.yaml
@@ -0,0 +1,3 @@
+handlers:
+- url: /_ah/admin.*
+ script: google.appengine.ext.admin_redirect.main.APP
diff --git a/google/appengine/ext/builtins/appstats/include-python27.yaml b/google/appengine/ext/builtins/appstats/include-python27.yaml
new file mode 100644
index 0000000..426777b
--- /dev/null
+++ b/google/appengine/ext/builtins/appstats/include-python27.yaml
@@ -0,0 +1,3 @@
+handlers:
+- url: /_ah/stats.*
+ script: google.appengine.ext.appstats.ui.app
diff --git a/google/appengine/ext/builtins/datastore_admin/include-python27.yaml b/google/appengine/ext/builtins/datastore_admin/include-python27.yaml
new file mode 100644
index 0000000..4dedbed
--- /dev/null
+++ b/google/appengine/ext/builtins/datastore_admin/include-python27.yaml
@@ -0,0 +1,12 @@
+builtins:
+- mapreduce: on
+
+handlers:
+- url: /_ah/datastore_admin.*
+ script: google.appengine.ext.datastore_admin.main.APP
+ login: admin
+
+admin_console:
+ pages:
+ - name: Datastore Admin
+ url: /_ah/datastore_admin/
diff --git a/google/appengine/ext/builtins/default/include-python27.yaml b/google/appengine/ext/builtins/default/include-python27.yaml
new file mode 100644
index 0000000..5fcb221
--- /dev/null
+++ b/google/appengine/ext/builtins/default/include-python27.yaml
@@ -0,0 +1 @@
+# No defaults yet.
diff --git a/google/appengine/ext/builtins/deferred/include-python27.yaml b/google/appengine/ext/builtins/deferred/include-python27.yaml
new file mode 100644
index 0000000..08d0589
--- /dev/null
+++ b/google/appengine/ext/builtins/deferred/include-python27.yaml
@@ -0,0 +1,4 @@
+handlers:
+- url: /_ah/queue/deferred
+ script: google.appengine.ext.deferred.application
+ login: admin
diff --git a/google/appengine/ext/builtins/mapreduce/include-python27.yaml b/google/appengine/ext/builtins/mapreduce/include-python27.yaml
new file mode 100644
index 0000000..1ab5d68
--- /dev/null
+++ b/google/appengine/ext/builtins/mapreduce/include-python27.yaml
@@ -0,0 +1,4 @@
+handlers:
+- url: /_ah/mapreduce.*
+ script: google.appengine.ext.mapreduce.main.APP
+ login: admin
diff --git a/google/appengine/ext/builtins/remote_api/include-python27.yaml b/google/appengine/ext/builtins/remote_api/include-python27.yaml
new file mode 100644
index 0000000..051173c
--- /dev/null
+++ b/google/appengine/ext/builtins/remote_api/include-python27.yaml
@@ -0,0 +1,3 @@
+handlers:
+- url: /_ah/remote_api(/.*)?
+ script: google.appengine.ext.remote_api.handler.application
diff --git a/google/appengine/ext/datastore_admin/delete_handler.py b/google/appengine/ext/datastore_admin/delete_handler.py
index a0e1245..9700e1b 100755
--- a/google/appengine/ext/datastore_admin/delete_handler.py
+++ b/google/appengine/ext/datastore_admin/delete_handler.py
@@ -46,12 +46,17 @@
key: key of the entity to delete.
Yields:
- a delete operation if the entity is not an active mapreduce object.
+ a delete operation if the entity is not an active mapreduce or
+ DatastoreAdminOperation object.
"""
if key.kind() in MAPREDUCE_OBJECTS:
entity = datastore.Get(key)
if entity and not entity["active"]:
yield operation.db.Delete(key)
+ elif key.kind() == utils.DatastoreAdminOperation.kind():
+ entity = datastore.Get(key)
+ if entity and not entity["active_jobs"]:
+ yield operation.db.Delete(key)
else:
yield operation.db.Delete(key)
diff --git a/google/appengine/ext/datastore_admin/remote_api_put_stub.py b/google/appengine/ext/datastore_admin/remote_api_put_stub.py
index b9aaa5a..dfd14b5 100755
--- a/google/appengine/ext/datastore_admin/remote_api_put_stub.py
+++ b/google/appengine/ext/datastore_admin/remote_api_put_stub.py
@@ -240,7 +240,7 @@
if not app_info or 'rtok' not in app_info or 'app_id' not in app_info:
logging.info('Response unparsable: %s', response)
raise ConfigurationError('Error parsing app_id lookup response')
- if app_info['rtok'] != rtok:
+ if str(app_info['rtok']) != rtok:
logging.info('Response invalid token (expected %s): %s', rtok, response)
raise ConfigurationError('Token validation failed during app_id lookup. '
'(sent %s, got %s)' % (repr(rtok),
diff --git a/google/appengine/ext/datastore_admin/static/css/compiled.css b/google/appengine/ext/datastore_admin/static/css/compiled.css
index b2a639e..2c586e9 100755
--- a/google/appengine/ext/datastore_admin/static/css/compiled.css
+++ b/google/appengine/ext/datastore_admin/static/css/compiled.css
@@ -1,2 +1,2 @@
/* Copyright 2011 Google Inc. All Rights Reserved. */
-html,body,div,h1,h2,h3,h4,h5,h6,p,img,dl,dt,dd,ol,ul,li,table,caption,tbody,tfoot,thead,tr,th,td,form,fieldset,embed,object,applet{margin:0;padding:0;border:0;}body{font-size:62.5%;font-family:Arial,sans-serif;color:#000;background:#fff}a{color:#00c}a:active{color:#f00}a:visited{color:#551a8b}table{border-collapse:collapse;border-width:0;empty-cells:show}ul{padding:0 0 1em 1em}ol{padding:0 0 1em 1.3em}li{line-height:1.5em;padding:0 0 .5em 0}p{padding:0 0 1em 0}h1,h2,h3,h4,h5{padding:0 0 1em 0}h1,h2{font-size:1.3em}h3{font-size:1.1em}h4,h5,table{font-size:1em}sup,sub{font-size:.7em}input,select,textarea,option{font-family:inherit;font-size:inherit}.g-doc,.g-doc-1024,.g-doc-800{font-size:130%}.g-doc{width:100%;text-align:left}.g-section{width:100%;vertical-align:top;display:inline-block}*:first-child+html .g-section{display:block}* html .g-section{overflow:hidden}@-moz-document url-prefix(){.g-section{overflow:hidden}}@-moz-document url-prefix(){.g-section,tt:default{overflow:visible}}.g-section,.g-unit{zoom:1}.g-split .g-unit{text-align:right}.g-split .g-first{text-align:left}.g-doc-1024{width:73.074em;min-width:950px;margin:0 auto;text-align:left}* html .g-doc-1024{width:71.313em}*+html .g-doc-1024{width:71.313em}.g-doc-800{width:57.69em;min-width:750px;margin:0 auto;text-align:left}* html .g-doc-800{width:56.3em}*+html .g-doc-800{width:56.3em}.g-tpl-160 .g-unit,.g-unit .g-tpl-160 .g-unit,.g-unit .g-unit .g-tpl-160 .g-unit,.g-unit .g-unit .g-unit .g-tpl-160 .g-unit{margin:0 0 0 160px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-160 .g-first,.g-unit .g-unit .g-tpl-160 .g-first,.g-unit .g-tpl-160 .g-first,.g-tpl-160 .g-first{margin:0;width:160px;float:left}.g-tpl-160-alt .g-unit,.g-unit .g-tpl-160-alt .g-unit,.g-unit .g-unit .g-tpl-160-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-160-alt .g-unit{margin:0 160px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-160-alt .g-first,.g-unit .g-unit .g-tpl-160-alt .g-first,.g-unit .g-tpl-160-alt 
.g-first,.g-tpl-160-alt .g-first{margin:0;width:160px;float:right}.g-tpl-180 .g-unit,.g-unit .g-tpl-180 .g-unit,.g-unit .g-unit .g-tpl-180 .g-unit,.g-unit .g-unit .g-unit .g-tpl-180 .g-unit{margin:0 0 0 180px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-180 .g-first,.g-unit .g-unit .g-tpl-180 .g-first,.g-unit .g-tpl-180 .g-first,.g-tpl-180 .g-first{margin:0;width:180px;float:left}.g-tpl-180-alt .g-unit,.g-unit .g-tpl-180-alt .g-unit,.g-unit .g-unit .g-tpl-180-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-180-alt .g-unit{margin:0 180px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-180-alt .g-first,.g-unit .g-unit .g-tpl-180-alt .g-first,.g-unit .g-tpl-180-alt .g-first,.g-tpl-180-alt .g-first{margin:0;width:180px;float:right}.g-tpl-300 .g-unit,.g-unit .g-tpl-300 .g-unit,.g-unit .g-unit .g-tpl-300 .g-unit,.g-unit .g-unit .g-unit .g-tpl-300 .g-unit{margin:0 0 0 300px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-300 .g-first,.g-unit .g-unit .g-tpl-300 .g-first,.g-unit .g-tpl-300 .g-first,.g-tpl-300 .g-first{margin:0;width:300px;float:left}.g-tpl-300-alt .g-unit,.g-unit .g-tpl-300-alt .g-unit,.g-unit .g-unit .g-tpl-300-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-300-alt .g-unit{margin:0 300px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-300-alt .g-first,.g-unit .g-unit .g-tpl-300-alt .g-first,.g-unit .g-tpl-300-alt .g-first,.g-tpl-300-alt .g-first{margin:0;width:300px;float:right}.g-tpl-25-75 .g-unit,.g-unit .g-tpl-25-75 .g-unit,.g-unit .g-unit .g-tpl-25-75 .g-unit,.g-unit .g-unit .g-unit .g-tpl-25-75 .g-unit{width:74.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-25-75 .g-first,.g-unit .g-unit .g-tpl-25-75 .g-first,.g-unit .g-tpl-25-75 .g-first,.g-tpl-25-75 .g-first{width:24.999%;float:left;margin:0}.g-tpl-25-75-alt .g-unit,.g-unit .g-tpl-25-75-alt .g-unit,.g-unit .g-unit .g-tpl-25-75-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-25-75-alt .g-unit{width:24.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-25-75-alt 
.g-first,.g-unit .g-unit .g-tpl-25-75-alt .g-first,.g-unit .g-tpl-25-75-alt .g-first,.g-tpl-25-75-alt .g-first{width:74.999%;float:right;margin:0}.g-tpl-75-25 .g-unit,.g-unit .g-tpl-75-25 .g-unit,.g-unit .g-unit .g-tpl-75-25 .g-unit,.g-unit .g-unit .g-unit .g-tpl-75-25 .g-unit{width:24.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-75-25 .g-first,.g-unit .g-unit .g-tpl-75-25 .g-first,.g-unit .g-tpl-75-25 .g-first,.g-tpl-75-25 .g-first{width:74.999%;float:left;margin:0}.g-tpl-75-25-alt .g-unit,.g-unit .g-tpl-75-25-alt .g-unit,.g-unit .g-unit .g-tpl-75-25-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-75-25-alt .g-unit{width:74.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-75-25-alt .g-first,.g-unit .g-unit .g-tpl-75-25-alt .g-first,.g-unit .g-tpl-75-25-alt .g-first,.g-tpl-75-25-alt .g-first{width:24.999%;float:right;margin:0}.g-tpl-33-67 .g-unit,.g-unit .g-tpl-33-67 .g-unit,.g-unit .g-unit .g-tpl-33-67 .g-unit,.g-unit .g-unit .g-unit .g-tpl-33-67 .g-unit{width:66.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-33-67 .g-first,.g-unit .g-unit .g-tpl-33-67 .g-first,.g-unit .g-tpl-33-67 .g-first,.g-tpl-33-67 .g-first{width:32.999%;float:left;margin:0}.g-tpl-33-67-alt .g-unit,.g-unit .g-tpl-33-67-alt .g-unit,.g-unit .g-unit .g-tpl-33-67-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-33-67-alt .g-unit{width:32.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-33-67-alt .g-first,.g-unit .g-unit .g-tpl-33-67-alt .g-first,.g-unit .g-tpl-33-67-alt .g-first,.g-tpl-33-67-alt .g-first{width:66.999%;float:right;margin:0}.g-tpl-67-33 .g-unit,.g-unit .g-tpl-67-33 .g-unit,.g-unit .g-unit .g-tpl-67-33 .g-unit,.g-unit .g-unit .g-unit .g-tpl-67-33 .g-unit{width:32.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-67-33 .g-first,.g-unit .g-unit .g-tpl-67-33 .g-first,.g-unit .g-tpl-67-33 .g-first,.g-tpl-67-33 .g-first{width:66.999%;float:left;margin:0}.g-tpl-67-33-alt .g-unit,.g-unit .g-tpl-67-33-alt .g-unit,.g-unit .g-unit .g-tpl-67-33-alt 
.g-unit,.g-unit .g-unit .g-unit .g-tpl-67-33-alt .g-unit{width:66.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-67-33-alt .g-first,.g-unit .g-unit .g-tpl-67-33-alt .g-first,.g-unit .g-tpl-67-33-alt .g-first,.g-tpl-67-33-alt .g-first{width:32.999%;float:right;margin:0}.g-tpl-50-50 .g-unit,.g-unit .g-tpl-50-50 .g-unit,.g-unit .g-unit .g-tpl-50-50 .g-unit,.g-unit .g-unit .g-unit .g-tpl-50-50 .g-unit{width:49.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-50-50 .g-first,.g-unit .g-unit .g-tpl-50-50 .g-first,.g-unit .g-tpl-50-50 .g-first,.g-tpl-50-50 .g-first{width:49.999%;float:left;margin:0}.g-tpl-50-50-alt .g-unit,.g-unit .g-tpl-50-50-alt .g-unit,.g-unit .g-unit .g-tpl-50-50-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-50-50-alt .g-unit{width:49.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-50-50-alt .g-first,.g-unit .g-unit .g-tpl-50-50-alt .g-first,.g-unit .g-tpl-50-50-alt .g-first,.g-tpl-50-50-alt .g-first{width:49.999%;float:right;margin:0}.g-tpl-nest{width:auto}.g-tpl-nest .g-section{display:inline}.g-tpl-nest .g-unit,.g-unit .g-tpl-nest .g-unit,.g-unit .g-unit .g-tpl-nest .g-unit,.g-unit .g-unit .g-unit .g-tpl-nest .g-unit{float:left;width:auto;margin:0}.g-tpl-nest-alt .g-unit,.g-unit .g-tpl-nest-alt .g-unit,.g-unit .g-unit .g-tpl-nest-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-nest-alt .g-unit{float:right;width:auto;margin:0}.goog-button{border-width:1px;border-style:solid;border-color:#bbb #999 #999 #bbb;border-radius:2px;-webkit-border-radius:2px;-moz-border-radius:2px;font:normal normal normal 13px/13px Arial,sans-serif;color:#000;text-align:middle;text-decoration:none;text-shadow:0 1px 1px rgba(255,255,255,1);background:#eee;background-image:-webkit-gradient(linear,0 0,0 
100%,from(#fff),to(#ddd));background:-moz-linear-gradient(top,#fff,#ddd);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#dddddd',StartColorstr='#ffffff',GradientType=0);cursor:pointer;margin:0;display:inline;display:-moz-inline-box;display:inline-block;*overflow:visible;padding:4px 8px 5px}a.goog-button,span.goog-button,div.goog-button{padding:4px 8px 5px}.goog-button:visited{color:#000}.goog-button{*display:inline}.goog-button:focus,.goog-button:hover{border-color:#000}.goog-button:active,.goog-button-active{color:#000;background-color:#bbb;border-color:#999 #bbb #bbb #999;background-image:-webkit-gradient(linear,0 0,0 100%,from(#ddd),to(#fff));background-image:-moz-linear-gradient(top,#ddd,#fff);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#ffffff',StartColorstr='#dddddd',GradientType=0)}.goog-button[disabled],.goog-button[disabled]:active,.goog-button[disabled]:hover{color:#666;border-color:#ddd;background-color:#f3f3f3;background-image:none;text-shadow:none;cursor:auto}.goog-button{padding:5px 8px 4px\9}.goog-button{*padding:4px 7px 2px}html>body input.goog-button,x:-moz-any-link,x:default,html>body button.goog-button,x:-moz-any-link,x:default{padding-top:3px;padding-bottom:2px}a.goog-button,x:-moz-any-link,x:default,span.goog-button,x:-moz-any-link,x:default,div.goog-button,x:-moz-any-link,x:default{padding:4px 8px 5px}.goog-button-fixed{padding-left:0!important;padding-right:0!important;width:100%}button.goog-button-icon-c{padding-top:1px;padding-bottom:1px}button.goog-button-icon-c{padding-top:3px\9;padding-bottom:2px\9}button.goog-button-icon-c{*padding-top:0;*padding-bottom:0}html>body button.goog-button-icon-c,x:-moz-any-link,x:default{padding-top:1px;padding-bottom:1px}.goog-button-icon{display:block;margin:0 auto;height:18px;width:18px}html>body .goog-inline-block{display:-moz-inline-box;display:inline-block;}.goog-inline-block{position:relative;display:inline-block}* html 
.goog-inline-block{display:inline}*:first-child+html .goog-inline-block{display:inline}.goog-custom-button{margin:0 2px 2px;border:0;padding:0;font:normal Tahoma,Arial,sans-serif;color:#000;text-decoration:none;list-style:none;vertical-align:middle;cursor:pointer;outline:none;background:#eee;background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#ddd));background:-moz-linear-gradient(top,#fff,#ddd);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#dddddd',StartColorstr='#ffffff',GradientType=0)}.goog-custom-button-outer-box,.goog-custom-button-inner-box{border-style:solid;border-color:#bbb #999 #999 #bbb;vertical-align:top}.goog-custom-button-outer-box{margin:0;border-width:1px 0;padding:0}.goog-custom-button-inner-box{margin:0 -1px;border-width:0 1px;padding:3px 4px}* html .goog-custom-button-inner-box{left:-1px}* html .goog-custom-button-rtl .goog-custom-button-outer-box{left:-1px}* html .goog-custom-button-rtl .goog-custom-button-inner-box{left:0}*:first-child+html .goog-custom-button-inner-box{left:-1px}*:first-child+html .goog-custom-button-collapse-right .goog-custom-button-inner-box{border-left-width:2px}*:first-child+html .goog-custom-button-collapse-left .goog-custom-button-inner-box{border-right-width:2px}*:first-child+html .goog-custom-button-collapse-right.goog-custom-button-collapse-left .goog-custom-button-inner-box{border-width:0 1px}*:first-child+html .goog-custom-button-rtl .goog-custom-button-inner-box{left:1px}::root .goog-custom-button,::root .goog-custom-button-outer-box{line-height:0}::root .goog-custom-button-inner-box{line-height:normal}.goog-custom-button-disabled{background-image:none!important;opacity:0.4;-moz-opacity:0.4;filter:alpha(opacity=40)}.goog-custom-button-disabled .goog-custom-button-outer-box,.goog-custom-button-disabled .goog-custom-button-inner-box{color:#333!important;border-color:#999!important}* html .goog-custom-button-disabled{margin:2px 1px!important;padding:0 1px!important}*:first-child+html 
.goog-custom-button-disabled{margin:2px 1px!important;padding:0 1px!important}.goog-custom-button-hover .goog-custom-button-outer-box,.goog-custom-button-hover .goog-custom-button-inner-box{border-color:#000!important;}.goog-custom-button-active,.goog-custom-button-checked{background-color:#bbb;background-position:bottom left;background-image:-webkit-gradient(linear,0 0,0 100%,from(#ddd),to(#fff));background:-moz-linear-gradient(top,#ddd,#fff);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#ffffff',StartColorstr='#dddddd',GradientType=0)}.goog-custom-button-focused .goog-custom-button-outer-box,.goog-custom-button-focused .goog-custom-button-inner-box,.goog-custom-button-focused.goog-custom-button-collapse-left .goog-custom-button-inner-box,.goog-custom-button-focused.goog-custom-button-collapse-left.goog-custom-button-checked .goog-custom-button-inner-box{border-color:#000}.goog-custom-button-collapse-right,.goog-custom-button-collapse-right .goog-custom-button-outer-box,.goog-custom-button-collapse-right .goog-custom-button-inner-box{margin-right:0}.goog-custom-button-collapse-left,.goog-custom-button-collapse-left .goog-custom-button-outer-box,.goog-custom-button-collapse-left .goog-custom-button-inner-box{margin-left:0}.goog-custom-button-collapse-left .goog-custom-button-inner-box{border-left:1px solid #fff}.goog-custom-button-collapse-left.goog-custom-button-checked .goog-custom-button-inner-box{border-left:1px solid #ddd}* html .goog-custom-button-collapse-left .goog-custom-button-inner-box{left:0}*:first-child+html .goog-custom-button-collapse-left .goog-custom-button-inner-box{left:0}.goog-date-picker th,.goog-date-picker td{font-family:arial,sans-serif;text-align:center}.goog-date-picker th{font-size:.9em;font-weight:bold;color:#666667;background-color:#c3d9ff}.goog-date-picker td{vertical-align:middle;padding:2px 3px}.goog-date-picker{-moz-user-focus:normal;-moz-user-select:none;position:absolute;border:1px solid 
gray;float:left;font-family:arial,sans-serif;padding-left:1px;background:white}.goog-date-picker-menu{position:absolute;background:threedface;border:1px solid gray;-moz-user-focus:normal}.goog-date-picker-menu ul{list-style:none;margin:0;padding:0}.goog-date-picker-menu ul li{cursor:default}.goog-date-picker-menu-selected{background-color:#aaccee}.goog-date-picker td div{float:left}.goog-date-picker button{padding:0;margin:1px;border:1px outset gray}.goog-date-picker-week{padding:1px 3px}.goog-date-picker-wday{padding:1px 3px}.goog-date-picker-today-cont{text-align:left!important}.goog-date-picker-none-cont{text-align:right!important}.goog-date-picker-head td{text-align:center}.goog-date-picker-month{width:12ex}.goog-date-picker-year{width:6ex}.goog-date-picker table{border-collapse:collapse}.goog-date-picker-selected{background-color:#aaccee!important;color:blue!important}.goog-date-picker-today{font-weight:bold!important}.goog-date-picker-other-month{-moz-opacity:0.3;filter:Alpha(Opacity=30)}.sat,.sun{background:#eee}#button1,#button2{display:block;width:60px;text-align:center;margin:10px;padding:10px;font:normal .8em arial,sans-serif;border:1px solid #000}.goog-menu{position:absolute;color:#000;border:1px solid #b5b6b5;background-color:#f3f3f7;cursor:default;font:normal small arial,helvetica,sans-serif;margin:0;padding:0;outline:none}.goog-menuitem{padding:2px 5px;margin:0;list-style:none}.goog-menuitem-highlight{background-color:#4279a5;color:#fff}.goog-menuitem-disabled{color:#999}.goog-option{padding-left:15px!important}.goog-option-selected{background-image:url(/img/check.gif);background-position:4px 50%;background-repeat:no-repeat}.goog-menuseparator{position:relative;margin:2px 0;border-top:1px solid #999;padding:0;outline:none}.goog-submenu{position:relative}.goog-submenu-arrow{position:absolute;display:block;width:11px;height:11px;right:3px;top:4px;background-image:url(/img/menu-arrows.gif);background-repeat:no-repeat;background-position:0 
0;font-size:1px}.goog-menuitem-highlight .goog-submenu-arrow{background-position:0 -11px}.goog-menuitem-disabled .goog-submenu-arrow{display:none}.goog-menu-filter{margin:2px;border:1px solid silver;background:white;overflow:hidden}.goog-menu-filter div{color:gray;position:absolute;padding:1px}.goog-menu-filter input{margin:0;border:0;background:transparent;width:100%}.goog-menuitem-partially-checked{background-image:url(/img/check-outline.gif);background-position:4px 50%;background-repeat:no-repeat}.goog-menuitem-fully-checked{background-image:url(/img/check.gif);background-position:4px 50%;background-repeat:no-repeat}.goog-menu-button{margin:0 2px 2px 2px;border:0;padding:0;font:normal Tahoma,Arial,sans-serif;color:#000;background:#ddd url("/img/button-bg.gif") repeat-x top left;text-decoration:none;list-style:none;vertical-align:middle;cursor:pointer;outline:none}.goog-menu-button-outer-box,.goog-menu-button-inner-box{border-style:solid;border-color:#aaa;vertical-align:middle}.goog-menu-button-outer-box{margin:0;border-width:1px 0;padding:0}.goog-menu-button-inner-box{margin:0 -1px;border-width:0 1px;padding:0 4px 2px 4px}* html .goog-menu-button-inner-box{left:-1px}* html .goog-menu-button-rtl .goog-menu-button-outer-box{left:-1px}* html .goog-menu-button-rtl .goog-menu-button-inner-box{left:0}*:first-child+html .goog-menu-button-inner-box{left:-1px}*:first-child+html .goog-menu-button-rtl .goog-menu-button-inner-box{left:1px}::root .goog-menu-button,::root .goog-menu-button-outer-box,::root .goog-menu-button-inner-box{line-height:0}::root .goog-menu-button-caption,::root .goog-menu-button-dropdown{line-height:normal}.goog-menu-button-disabled{background-image:none!important;opacity:0.4;-moz-opacity:0.4;filter:alpha(opacity=40)}.goog-menu-button-disabled .goog-menu-button-outer-box,.goog-menu-button-disabled .goog-menu-button-inner-box,.goog-menu-button-disabled .goog-menu-button-caption,.goog-menu-button-disabled 
.goog-menu-button-dropdown{color:#333!important;border-color:#999!important}* html .goog-menu-button-disabled{margin:2px 1px!important;padding:0 1px!important}*:first-child+html .goog-menu-button-disabled{margin:2px 1px!important;padding:0 1px!important}.goog-menu-button-hover .goog-menu-button-outer-box,.goog-menu-button-hover .goog-menu-button-inner-box{border-color:#9cf #69e #69e #7af!important;}.goog-menu-button-active,.goog-menu-button-open{background-color:#bbb;background-position:bottom left}.goog-menu-button-focused .goog-menu-button-outer-box,.goog-menu-button-focused .goog-menu-button-inner-box{border-color:#3366cc}.goog-menu-button-caption{padding:0 4px 0 0;vertical-align:middle}.goog-menu-button-rtl .goog-menu-button-caption{padding:0 0 0 4px}.goog-menu-button-dropdown{width:7px;background:url(/img/toolbar_icons.gif) no-repeat -176px;vertical-align:middle}.goog-flat-menu-button{margin:0 2px;padding:1px 4px;font:normal 95% Tahoma,Arial,sans-serif;color:#333;text-decoration:none;list-style:none;vertical-align:middle;cursor:pointer;outline:none;-moz-outline:none;border-width:1px;border-style:solid;border-color:#c9c9c9;background-color:#fff}.goog-flat-menu-button-disabled *{color:#999;border-color:#ccc;cursor:default}.goog-flat-menu-button-hover,.goog-flat-menu-button-hover{border-color:#9cf #69e #69e #7af!important;}.goog-flat-menu-button-active{background-color:#bbb;background-position:bottom left}.goog-flat-menu-button-focused{border-color:#3366cc}.goog-flat-menu-button-caption{padding-right:10px;vertical-align:middle}.goog-flat-menu-button-dropdown{width:7px;background:url(/img/toolbar_icons.gif) no-repeat -176px;vertical-align:middle}h1{font-size:1.8em}.g-doc{width:auto;margin:0 10px}.g-doc-1024{margin-left:10px}#ae-logo{background:url(//www.google.com/images/logos/app_engine_logo_sm.gif) 0 0 no-repeat;display:block;width:178px;height:30px;margin:4px 0 0 0}.ae-ir 
span{position:absolute;display:block;width:0;height:0;overflow:hidden}.ae-noscript{position:absolute;left:-5000px}#ae-lhs-nav{border-right:3px solid #e5ecf9}.ae-notification{margin-bottom:.6em;text-align:center}.ae-notification strong{display:block;width:55%;margin:0 auto;text-align:center;padding:.6em;background-color:#fff1a8;font-weight:bold}.ae-alert{font-weight:bold;background:url(/img/icn/warning.png) no-repeat;margin-bottom:.5em;padding-left:1.8em}.ae-info{background:url(/img/icn/icn-info.gif) no-repeat;margin-bottom:.5em;padding-left:1.8em}.ae-promo{padding:.5em .8em;margin:.6em 0;background-color:#fffbe8;border:1px solid #fff1a9;text-align:left}.ae-promo strong{position:relative;top:.3em}.ae-alert-text,.ae-warning-text{background-color:transparent;background-position:right 1px;padding:0 18px 0 0}.ae-alert-text{color:#c00}.ae-warning-text{color:#f90}.ae-alert-c span{display:inline-block}.ae-message{border:1px solid #e5ecf9;background-color:#f6f9ff;margin-bottom:1em;padding:.5em}.ae-errorbox{border:1px solid #f00;background-color:#fee;margin-bottom:1em;padding:1em}#bd .ae-errorbox ul{padding-bottom:0}.ae-form dt{font-weight:bold}.ae-form dt em,.ae-field-hint{margin-top:.2em;color:#666667;font-size:.85em}.ae-field-yyyymmdd,.ae-field-hhmmss{width:6em}.ae-field-hint-hhmmss{margin-left:2.3em}.ae-form label{display:block;margin:0 0 .2em 0;font-weight:bold}.ae-radio{margin-bottom:.3em}.ae-radio label{display:inline}.ae-form dd,.ae-input-row{margin-bottom:.6em}.ae-input-row-group{border:1px solid #fff1a9;background:#fffbe8;padding:8px}.ae-btn-row{margin-top:1.4em;margin-bottom:1em}.ae-btn-row-note{padding:5px 0 6px 0}.ae-btn-row-note span{padding-left:18px;padding-right:.5em;background:transparent url(/img/icn/icn-info.gif) 0 0 no-repeat}.ae-btn-primary{font-weight:bold}form .ae-cancel{margin-left:.5em}.ae-submit-inline{margin-left:.8em}.ae-radio-bullet{width:20px;float:left}.ae-label-hanging-indent{margin-left:5px}.ae-divider{margin:0 .6em 0 
.5em}.ae-nowrap{white-space:nowrap}.ae-pre-wrap{white-space:pre-wrap;white-space:-moz-pre-wrap;white-space:-pre-wrap;white-space:-o-pre-wrap;word-wrap:break-word;_white-space:pre;}wbr:after{content:"\00200B"}a button{text-decoration:none}.ae-alert ul{margin-bottom:.75em;margin-top:.25em;line-height:1.5em}.ae-alert h4{color:#000;font-weight:bold;padding:0 0 .5em}.ae-form-simple-list{list-style-type:none;padding:0;margin-bottom:1em}.ae-form-simple-list li{padding:.3em 0 .5em .5em;border-bottom:1px solid #c3d9ff}div.ae-datastore-index-to-delete,div.ae-datastore-index-to-build{color:#aaa}#hd p{padding:0}#hd li{display:inline}ul{padding:0 0 1em 1.2em}#ae-userinfo{text-align:right;white-space:nowrap;}#ae-userinfo ul{padding-bottom:0;padding-top:5px}#ae-appbar-lrg{margin:0 0 1.25em 0;padding:.25em .5em;background-color:#e5ecf9;border-top:1px solid #36c}#ae-appbar-lrg h1{font-size:1.2em;padding:0}#ae-appbar-lrg h1 span{font-size:80%;font-weight:normal}#ae-appbar-lrg form{display:inline;padding-right:.1em;margin-right:.5em}#ae-appbar-lrg strong{white-space:nowrap}#ae-appbar-sml{margin:0 0 1.25em 0;height:8px;padding:0 .5em;background:#e5ecf9}.ae-rounded-sml{border-radius:3px;-moz-border-radius:3px;-webkit-border-radius:3px}#ae-appbar-lrg a{margin-top:.3em}a.ae-ext-link,a span.ae-ext-link{background:url(/img/icn/icn-open-in-new-window.png) no-repeat right;padding-right:18px;margin-right:8px}.ae-no-pad{padding-left:1em}.ae-message h4{margin-bottom:.3em;padding-bottom:0}#ft{text-align:center;margin:2.5em 0 1em;padding-top:.5em;border-top:2px solid #c3d9ff}#bd h3{font-weight:bold;font-size:1.4em}#bd h3 .ae-apps-switch{font-weight:normal;font-size:.7em;margin-left:2em}#bd p{padding:0 0 1em 0}#ae-content{padding-left:1em}.ae-unimportant{color:#666}.ae-new-usr td{border-top:1px solid #ccccce;background-color:#ffe}.ae-error-td td{border:2px solid #f00;background-color:#fee}.ae-delete{cursor:pointer;border:none;background:transparent;}.ae-btn-large{background:#039 
url(/img/icn/button_back.png) repeat-x;color:#fff;font-weight:bold;font-size:1.2em;padding:.5em;border:2px outset #000;cursor:pointer}.ae-breadcrumb{margin:0 0 1em}.ae-disabled,a.ae-disabled,a.ae-disabled:hover,a.ae-disabled:active{color:#666!important;text-decoration:none!important;cursor:default!important;opacity:.4!important;-moz-opacity:.4!important;filter:alpha(opacity=40)!important}input.ae-readonly{border:2px solid transparent;border-left:0;background-color:transparent}span.ae-text-input-clone{padding:5px 5px 5px 0}.ae-loading{opacity:.4;-moz-opacity:.4;filter:alpha(opacity=40)}.ae-tip{margin:1em 0;background:url(/img/tip.png) top left no-repeat;padding:2px 0 0 25px}sup.ae-new-sup{color:red}.ae-action{color:#00c;cursor:pointer;text-decoration:underline}.ae-toggle{padding-left:16px;background-position:left center;background-repeat:no-repeat;cursor:pointer}.ae-minus{background-image:url(/img/wgt/minus.gif)}.ae-plus{background-image:url(/img/wgt/plus.gif)}.ae-print{background-image:url(/img/print.gif);padding-left:19px}.ae-currency,.ae-table thead th.ae-currency{text-align:right;white-space:nowrap}#ae-loading{font-size:1.2em;position:absolute;text-align:center;top:0;width:100%}#ae-loading div{margin:0 auto;background:#fff1a9;width:5em;font-weight:bold;padding:4px 10px;-moz-border-radius-bottomleft:3px;-moz-border-radius-bottomright:3px;-webkit-border-radius-bottomleft:3px;-webkit-border-radius-bottomright:3px}.ae-occlude{filter:alpha(opacity=0);position:absolute}.g-tpl-66-34 .g-unit,.g-unit .g-tpl-66-34 .g-unit,.g-unit .g-unit .g-tpl-66-34 .g-unit,.g-unit .g-unit .g-unit .g-tpl-66-34 .g-unit{display:inline;margin:0;width:33.999%;float:right}.g-unit .g-unit .g-unit .g-tpl-66-34 .g-first,.g-unit .g-unit .g-tpl-66-34 .g-first,.g-unit .g-tpl-66-34 .g-first,.g-tpl-66-34 .g-first{display:inline;margin:0;width:65.999%;float:left}.ae-ie6-c{_margin-right:-2000px;_position:relative;_width:100%;background:#fff}h2.ae-section-header{background:#e5ecf9;padding:.2em 
.4em;margin-bottom:.5em}.ae-field-span{padding:3px 0}select{font:13px/13px Arial,sans-serif;color:#000;border-width:1px;border-style:solid;border-color:#bbb #999 #999 #bbb;-webkit-border-radius:2px;-moz-border-radius:2px;background:#eee;background:-moz-linear-gradient(top,#fff,#ddd);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#dddddd',StartColorstr='#ffffff',GradientType=0);cursor:pointer;padding:2px 1px;margin:0}select:hover{border-color:#000}select[disabled],select[disabled]:active{color:#666;border-color:#ddd;background-color:#f3f3f3;background-image:none;text-shadow:none;cursor:auto}.ae-table-plain{border-collapse:collapse;width:100%}.ae-table{border:1px solid #c5d7ef;border-collapse:collapse;width:100%}#bd h2.ae-table-title{background:#e5ecf9;margin:0;color:#000;font-size:1em;padding:3px 0 3px 5px;border-left:1px solid #c5d7ef;border-right:1px solid #c5d7ef;border-top:1px solid #c5d7ef}.ae-table-caption,.ae-table caption{border:1px solid #c5d7ef;background:#e5ecf9;-moz-margin-start:-1px}.ae-table caption{padding:3px 5px;text-align:left}.ae-table th,.ae-table td{background-color:#fff;padding:.35em 1em .25em .35em;margin:0}.ae-table thead th{font-weight:bold;text-align:left;background:#c5d7ef;vertical-align:bottom}.ae-table thead th .ae-no-bold{font-weight:normal}.ae-table tfoot tr td{border-top:1px solid #c5d7ef;background-color:#e5ecf9}.ae-table td{border-top:1px solid #c5d7ef;border-bottom:1px solid #c5d7ef}.ae-even>td,.ae-even th,.ae-even-top td,.ae-even-tween td,.ae-even-bottom td,ol.ae-even{background-color:#e9e9e9;border-top:1px solid #c5d7ef;border-bottom:1px solid #c5d7ef}.ae-even-top td{border-bottom:0}.ae-even-bottom td{border-top:0}.ae-even-tween td{border:0}.ae-table .ae-tween td{border:0}.ae-table .ae-tween-top td{border-bottom:0}.ae-table .ae-tween-bottom td{border-top:0}#bd .ae-table .cbc{width:1.5em;padding-right:0}.ae-table #ae-live td{background-color:#ffeac0}.ae-table-fixed{table-layout:fixed}.ae-table-fixed 
td,.ae-table-nowrap{overflow:hidden;white-space:nowrap}.ae-paginate strong{margin:0 .5em}tfoot .ae-paginate{text-align:right}.ae-table-caption .ae-paginate,.ae-table-caption .ae-orderby{padding:2px 5px}.modal-dialog{background:#c1d9ff;border:1px solid #3a5774;color:#000;padding:4px;position:absolute;font-size:1.3em;-moz-box-shadow:0 1px 4px #333;-webkit-box-shadow:0 1px 4px #333;box-shadow:0 1px 4px #333}.modal-dialog a,.modal-dialog a:link,.modal-dialog a:visited{color:#06c;cursor:pointer}.modal-dialog-bg{background:#666;left:0;position:absolute;top:0}.modal-dialog-title{background:#e0edfe;color:#000;cursor:pointer;font-size:120%;font-weight:bold;padding:8px 15px 8px 8px;position:relative;_zoom:1;}.modal-dialog-title-close{background:#e0edfe url(https://ssl.gstatic.com/editor/editortoolbar.png) no-repeat -528px 0;cursor:default;height:15px;position:absolute;right:10px;top:8px;width:15px;vertical-align:middle}.modal-dialog-buttons,.modal-dialog-content{background-color:#fff;padding:8px}.modal-dialog-buttons button{margin-right:.75em}.goog-buttonset-default{font-weight:bold}.goog-tab{position:relative;border:1px solid #8ac;padding:4px 9px;color:#000;background:#e5ecf9;border-top-left-radius:2px;border-top-right-radius:2px;-moz-border-radius-topleft:2px;-webkit-border-top-left-radius:2px;-moz-border-radius-topright:2px;-webkit-border-top-right-radius:2px}.goog-tab-bar-top .goog-tab{margin:1px 4px 0 0;border-bottom:0;float:left}.goog-tab-bar-bottom .goog-tab{margin:0 4px 1px 0;border-top:0;float:left}.goog-tab-bar-start .goog-tab{margin:0 0 4px 1px;border-right:0}.goog-tab-bar-end .goog-tab{margin:0 1px 4px 0;border-left:0}.goog-tab-hover{text-decoration:underline;cursor:pointer}.goog-tab-disabled{color:#fff;background:#ccc;border-color:#ccc}.goog-tab-selected{background:#fff!important;color:black;font-weight:bold}.goog-tab-bar-top .goog-tab-selected{top:1px;margin-top:0;padding-bottom:5px}.goog-tab-bar-bottom 
.goog-tab-selected{top:-1px;margin-bottom:0;padding-top:5px}.goog-tab-bar-start .goog-tab-selected{left:1px;margin-left:0;padding-right:9px}.goog-tab-bar-end .goog-tab-selected{left:-1px;margin-right:0;padding-left:9px}.goog-tab-content{padding:.1em .8em .8em .8em;border:1px solid #8ac;border-top:none}.goog-tab-bar{position:relative;margin:0 0 0 5px;border:0;padding:0;list-style:none;cursor:default;outline:none}.goog-tab-bar-clear{border-top:1px solid #8ac;clear:both;height:0;overflow:hidden}.goog-tab-bar-start{float:left}.goog-tab-bar-end{float:right}* html .goog-tab-bar-start{margin-right:-3px}* html .goog-tab-bar-end{margin-left:-3px}#ae-nav ul{list-style-type:none;margin:0;padding:1em 0}#ae-nav ul li{padding-left:.5em}#ae-nav .ae-nav-selected{color:#000;display:block;font-weight:bold;background-color:#e5ecf9;margin-right:-1px;border-top-left-radius:4px;-moz-border-radius-topleft:4px;-webkit-border-top-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomleft:4px;-webkit-border-bottom-left-radius:4px}#ae-nav .ae-nav-bold{font-weight:bold}#ae-nav ul li span.ae-nav-disabled{color:#666}#ae-nav ul ul{margin:0;padding:0 0 0 .5em}#ae-nav ul ul li{padding-left:.5em}#ae-nav ul li a,#ae-nav ul li span,#ae-nav ul ul li a{padding-left:.5em}#ae-nav li a:link,#ae-nav li a:visited{color:#00c}.ae-nav-group{padding:.5em;margin:0 .75em 0 0;background-color:#fffbe8;border:1px solid #fff1a9}.ae-nav-group h4{font-weight:bold;padding:auto auto .5em .5em;padding-left:.4em;margin-bottom:.5em;padding-bottom:0}.ae-nav-group ul{margin:0 0 .5em 0;padding:0 0 0 1.3em;list-style-type:none}.ae-nav-group ul li{padding-bottom:.5em}.ae-nav-group li a:link,.ae-nav-group li a:visited{color:#00c}.ae-nav-group li a:hover{color:#00c}@media 
print{body{font-size:13px;width:8.5in;background:#fff}table,.ae-table-fixed{table-layout:automatic}tr{display:table-row!important}.g-doc-1024{width:8.5in}#ae-appbar-lrg,.ae-table-caption,.ae-table-nowrap,.ae-nowrap,th,td{overflow:visible!important;white-space:normal!important;background:#fff!important}.ae-print,.ae-toggle{display:none}#ae-lhs-nav-c{display:none}#ae-content{margin:0;padding:0}.goog-zippy-collapsed,.goog-zippy-expanded{background:none!important;padding:0!important}}#ae-admin-dev-table{margin:0 0 1em 0}.ae-admin-dev-tip,.ae-admin-dev-tip.ae-tip{margin:-0.31em 0 2.77em}#ae-sms-countryselect{margin-right:.5em}#ae-admin-enable-form{margin-bottom:1em}#ae-admin-services-c{margin-top:2em}#ae-admin-services{padding:0 0 0 3em;margin-bottom:1em;font-weight:bold}#ae-admin-logs-table-c{_margin-right:-2000px;_position:relative;_width:100%;background:#fff}#ae-admin-logs-table{margin:0;padding:0}#ae-admin-logs-filters{padding:3px 0 3px 5px}#ae-admin-logs-pagination{padding:6px 5px 0 0;text-align:right;width:45%}#ae-admin-logs-pagination span.ae-disabled{color:#666;background-color:transparent}#ae-admin-logs-table td{white-space:nowrap}#ae-storage-content div.ae-alert{padding-bottom:5px}#ae-admin-performance-form input[type=text]{width:2em}.ae-admin-performance-value{font-weight:normal}.ae-admin-performance-static-value{color:#666}.goog-slider-horizontal,.goog-twothumbslider-horizontal{position:relative;width:502px;height:7px;display:block;outline:0;margin:1.0em 0 0.9em 3em}.ae-slider-rail:before{position:relative;top:-0.462em;float:left;content:'Min';margin:0 0 0 -3em;color:#999}.ae-slider-rail{position:absolute;background-color:#d9d9d9;top:0;right:8px;bottom:0;left:8px;border:solid 1px;border-color:#a6a6a6 #b3b3b3 #bfbfbf;border-radius:5px}.ae-slider-rail:after{position:relative;top:-0.462em;float:right;content:'Max';margin:0 -3em 0 0;color:#999}.goog-slider-horizontal .goog-slider-thumb,.goog-twothumbslider-horizontal 
.goog-twothumbslider-value-thumb,.goog-twothumbslider-horizontal .goog-twothumbslider-extent-thumb{position:absolute;width:17px;height:17px;background:transparent url(/img/slider_thumb-down.png) no-repeat;outline:0}.goog-slider-horizontal .goog-slider-thumb{top:-5px}.goog-twothumbslider-horizontal .goog-twothumbslider-value-thumb{top:-11px}.goog-twothumbslider-horizontal .goog-twothumbslider-extent-thumb{top:2px;background-image:url(/img/slider_thumb-up.png)}.ae-admin-performance-scale{position:relative;display:inline-block;width:502px;margin:0 0 2.7em 3em}.ae-admin-performance-scale .ae-admin-performance-scale-start{position:absolute;display:inline-block;top:0;width:100%;text-align:left}.ae-admin-performance-scale .ae-admin-performance-scale-mid{position:absolute;display:inline-block;top:0;width:100%;text-align:center}.ae-admin-performance-scale .ae-admin-performance-scale-end{position:absolute;display:inline-block;top:0;width:100%;text-align:right}.ae-absolute-container{display:inline-block;width:100%}#ae-billing-form-c{_margin-right:-3000px;_position:relative;_width:100%}.ae-rounded-top-small{-moz-border-radius-topleft:3px;-webkit-border-top-left-radius:3px;-moz-border-radius-topright:3px;-webkit-border-top-right-radius:3px}.ae-progress-content{height:400px}#ae-billing-tos{text-align:left;width:100%;margin-bottom:.5em}.ae-billing-budget-section{margin-bottom:1.5em}.ae-billing-budget-section .g-unit,.g-unit .ae-billing-budget-section .g-unit,.g-unit .g-unit .ae-billing-budget-section .g-unit{margin:0 0 0 11em;width:auto;float:none}.g-unit .g-unit .ae-billing-budget-section .g-first,.g-unit .ae-billing-budget-section .g-first,.ae-billing-budget-section .g-first{margin:0;width:11em;float:left}#ae-billing-form .ae-btn-row{margin-left:11em}#ae-billing-form .ae-btn-row .ae-info{margin-top:10px}#ae-billing-checkout{width:150px;float:left}#ae-billing-alloc-table{border:1px solid #c5d7ef;border-bottom:none;width:100%;margin-top:.5em}#ae-billing-alloc-table 
th,#ae-billing-alloc-table td{padding:.35em 1em .25em .35em;border-bottom:1px solid #c5d7ef;color:#000;white-space:nowrap}.ae-billing-resource{background-color:transparent;font-weight:normal}#ae-billing-alloc-table tr th span{font-weight:normal}#ae-billing-alloc-table tr{vertical-align:baseline}#ae-billing-alloc-table th{white-space:nowrap}#ae-billing-alloc-table .ae-editable span.ae-text-input-clone,#ae-billing-alloc-table .ae-readonly input{display:none}#ae-billing-alloc-table .ae-readonly span.ae-text-input-clone,#ae-billing-alloc-table .ae-editable input{display:inline}#ae-billing-alloc-table td span.ae-billing-warn-note,#ae-billing-table-errors .ae-billing-warn-note{margin:0;background-repeat:no-repeat;display:inline-block;background-image:url(/img/icn/warning.png);text-align:right;padding-left:16px;padding-right:.1em;height:16px;font-weight:bold}#ae-billing-alloc-table td span.ae-billing-warn-note span,#ae-billing-table-errors .ae-billing-warn-note span{vertical-align:super;font-size:80%}#ae-billing-alloc-table td span.ae-billing-error-hidden,#ae-billing-table-errors .ae-billing-error-hidden{display:none}.ae-billing-percent{font-size:80%;color:#666;margin-left:3px}#ae-billing-week-info{margin-top:5px;line-height:1.4}#ae-billing-table-errors{margin-top:.3em}#ae-billing-allocation-noscript{margin-top:1.5em}#ae-billing-allocation-custom-opts{margin-left:2.2em}#ae-billing-settings h2{font-size:1em;display:inline}#ae-billing-settings p{padding:.3em 0 .5em}#ae-billing-settings-table{margin:.4em 0 .5em}#ae-settings-resource-col{width:19%}#ae-settings-budget-col{width:11%}#ae-billing-settings-table .ae-settings-budget-col{padding-right:2em}.ae-table th.ae-settings-unit-cell,.ae-table td.ae-settings-unit-cell,.ae-table th.ae-total-unit-cell,.ae-table 
td.ae-total-unit-cell{padding-left:1.2em}#ae-settings-unit-col{width:18%}#ae-settings-paid-col{width:15%}#ae-settings-free-col{width:15%}#ae-settings-total-col{width:22%}.ae-billing-inline-link{margin-left:.5em}.ae-billing-settings-section{margin-bottom:2em}.ae-billing-settings-formbutton{margin-top:.5em}#ae-billing-budget-setup-checkout{margin-bottom:0}#ae-billing-vat-c .ae-field-hint{width:85%}#ae-billing-checkout-note{margin-top:.8em}.ae-table thead th.ae-currency-th{text-align:right}#ae-billing-logs-date{width:15%}#ae-billing-logs-admin{width:15%}#ae-billing-logs-event{width:54%}#ae-billing-logs-amount{text-align:right;width:8%}#ae-billing-logs-balance{text-align:right;width:8%}#ae-billing-history-expand .ae-action{margin-left:1em}.ae-table .ae-billing-usage-report{width:100%;*width:auto;margin:0 0 1em 0}.ae-table .ae-billing-usage-report th,.ae-billing-charges th{color:#666;border-top:0}.ae-table .ae-billing-usage-report th,.ae-table .ae-billing-usage-report td,.ae-billing-charges th,.ae-billing-charges td{background-color:transparent;padding:.4em 0;border-bottom:1px solid #ddd}.ae-table .ae-billing-usage-report tfoot td,.ae-billing-charges tfoot td{border-bottom:none}.ae-billing-report-resource{width:30%}.ae-billing-report-used{width:20%}.ae-billing-report-free{width:20%}.ae-billing-report-paid{width:15%}.ae-billing-report-charge{width:15%}.ae-billing-change-resource{width:85%}.ae-billing-change-budget{width:15%}#ae-billing-always-on-label{display:inline}#ae-billing-budget-buffer-label{display:inline}.ae-billing-charges{width:50%}.ae-billing-charges-charge{text-align:right}.ae-billing-usage-report-container{padding:1em 1em 0 
1em}#ae-billing-new-usage{background-color:#f6f9ff}.goog-zippy-expanded{background-image:url(/img/wgt/minus.gif);cursor:pointer;background-repeat:no-repeat;padding-left:17px}.goog-zippy-collapsed{background-image:url(/img/wgt/plus.gif);cursor:pointer;background-repeat:no-repeat;padding-left:17px}#ae-admin-logs-pagination{width:auto}.ae-usage-cycle-note{color:#555}#ae-createapp-start{background-color:#c6d5f1;padding:1em;padding-bottom:2em;text-align:center}#ae-admin-app_id_alias-check,#ae-createapp-id-check{margin:0 0 0 1em}#ae-admin-app_id_alias-message{display:block;margin:.4em 0}#ae-createapp-id-content{width:100%}#ae-createapp-id-content td{vertical-align:top}#ae-createapp-id-td{white-space:nowrap;width:1%}#ae-createapp-id-td #ae-createapp-id-error{position:absolute;width:24em;padding-left:1em;white-space:normal}#ae-createapp-id-error-td{padding-left:1em}#ae-admin-dev-invite label{float:left;width:3.6em;position:relative;top:.3em}#ae-admin-dev-invite .ae-radio{margin-left:3.6em}#ae-admin-dev-invite .ae-radio label{float:none;width:auto;font-weight:normal;position:static}#ae-admin-dev-invite .goog-button{margin-left:3.6em}#ae-admin-dev-invite .ae-field-hint{margin-left:4.2em}#ae-admin-dev-invite .ae-radio .ae-field-hint{margin-left:0}.ae-you{color:#008000}#ae-authdomain-opts{margin-bottom:1em}#ae-authdomain-content .ae-input-text,#ae-authdomain-content .ae-field-hint{margin:.3em 0 .4em 2.5em}#ae-authdomain-opts a{margin-left:1em}#ae-authdomain-opts-hint{margin-top:.2em;color:#666667;font-size:.85em}#ae-authdomain-content #ae-authdomain-desc .ae-field-hint{margin-left:0}#ae-storage-opts{margin-bottom:1em}#ae-storage-content .ae-input-text,#ae-storage-content .ae-field-hint{margin:.3em 0 .4em 2.5em}#ae-storage-opts a{margin-left:1em}#ae-storage-opts-hint{margin-top:.2em;color:#666667;font-size:.85em}#ae-storage-content #ae-storage-desc .ae-field-hint{margin-left:0}#ae-dash .g-section{margin:0 0 1em}#ae-dash * .g-section{margin:0}#ae-dash-quota 
.ae-alert{padding-left:1.5em}.ae-dash-email-disabled{background:url(/img/icn/exclamation_circle.png) no-repeat;margin-top:.5em;margin-bottom:.5em;min-height:16px;padding-left:1.5em}#ae-dash-email-disabled-footnote{padding-left:1.5em;margin:5px 0 0;font-weight:normal}#ae-dash-graph-c{border:1px solid #c5d7ef;padding:5px 0}#ae-dash-graph-change{margin:0 0 0 5px}#ae-dash-graph-img{padding:5px;margin-top:.5em;background-color:#fff;display:block}#ae-dash-graph-nodata{text-align:center}#ae-dash .ae-logs-severity{margin-right:.5em}#ae-dash .g-c{padding:0 0 0 .1em}#ae-dash .g-tpl-50-50 .g-unit .g-c{padding:0 0 0 1em}#ae-dash .g-tpl-50-50 .g-first .g-c{padding:0 1em 0 .1em}.ae-quota-warnings{background-color:#fffbe8;margin:0;padding:.5em .5em 0;text-align:left}.ae-quota-warnings div{padding:0 0 .5em}#ae-dash-quota-refresh-info{font-size:85%}#ae-dash #ae-dash-quota-bar-col,#ae-dash .ae-dash-quota-bar{width:100px}#ae-dash-quotadetails #ae-dash-quota-bar-col,#ae-dash-quotadetails .ae-dash-quota-bar{width:200px}#ae-dash-quota-percent-col{width:3.5em}#ae-dash-quota-cost-col{width:15%}#ae-dash-quota-alert-col{width:1%}#ae-dash .ae-dash-quota-alert-td{padding:0}.ae-dash-quota-alert-td a{display:block;width:16px;height:16px}#ae-dash .ae-dash-quota-alert-td .ae-alert{display:block;width:16px;height:16px;margin:0;padding:0}#ae-dash .ae-dash-quota-alert-td .ae-dash-email-disabled{display:block;width:16px;height:16px;margin:0;padding:0}#ae-dash-quota tbody th{font-weight:normal}#ae-dash-quota caption{padding:0}#ae-dash-quota caption .g-c{padding:3px}.ae-dash-quota-bar{float:left;background-color:#c0c0c0;height:13px;margin:.1em 0 0 0;position:relative}.ae-dash-quota-bar-free{background:url(/img/free_marker.png) top left no-repeat;width:7px;height:13px;position:absolute;top:0;left:0}#ae-dash-quota-footnote{margin:5px 0 
0;font-weight:normal}.ae-quota-warning{background-color:#f90}.ae-quota-alert{background-color:#c00}.ae-quota-normal{background-color:#0b0}.ae-quota-alert-text{color:#c00}.ae-favicon-text{font-size:.85em}#ae-dash-popular{width:97%}#ae-dash-popular-reqsec-col{width:6.5em}#ae-dash-popular-req-col{width:7em}#ae-dash-popular-cpu-avg-col{width:9.5em}#ae-dash-popular-cpu-percent-col{width:7em}#ae-dash-popular .ae-unimportant{font-size:80%}#ae-dash-popular .ae-nowrap,#ae-dash-errors .ae-nowrap{margin-right:5px;overflow:hidden}#ae-dash-popular th span,#ae-dash-errors th span{font-size:.8em;font-weight:normal;display:block}#ae-dash-errors caption .g-unit{width:9em}#ae-dash-errors-count-col{width:5em}#ae-dash-errors-percent-col{width:7em}#ae-dash-graph-chart-type{float:left;margin-right:1em}#ae-apps-all strong.ae-disabled{color:#000;background:#eee}.ae-quota-resource{width:30%}.ae-quota-safety-limit{width:10%}#ae-quota-details h3{padding-bottom:0;margin-bottom:.25em}#ae-quota-details table{margin-bottom:1.75em}#ae-quota-details table.ae-quota-requests{margin-bottom:.5em}#ae-quota-refresh-note p{text-align:right;padding-top:.5em;padding-bottom:0;margin-bottom:0}#ae-quota-first-api.g-section{padding-bottom:0;margin-bottom:.25em}#ae-instances-summary-table,#ae-instances-details-table{margin-bottom:1em}.ae-instances-details-availability-image{float:left;margin-right:.5em}.ae-instances-small-link{font-size:80%}.ae-appbar-superuser-message strong{color:red}#ae-backends-table tr{vertical-align:baseline}.ae-backends-class-reminder{font-size:80%;color:#666;margin-left:3px}#ae-datastore-explorer-c{_margin-right:-3000px;_position:relative;_width:100%}#ae-datastore-explorer form dt{margin:1em 0 0 0}#ae-datastore-explorer #ae-datastore-explorer-labels{margin:0 0 3px}#ae-datastore-explorer-header .ae-action{margin-left:1em}#ae-datastore-explorer .id{white-space:nowrap}#ae-datastore-explorer 
caption{text-align:right;padding:5px}#ae-datastore-explorer-submit{margin-top:5px}#ae-datastore-explorer-namespace{margin-top:7px;margin-right:5px}#ae-datastore-explorer-gql-spacer{margin-top:22px}h4 #ae-datastore-explorer-gql-label{font-weight:normal}#ae-datastore-form em{font-style:normal;font-weight:normal;margin:0 0 0 .2em;color:#666}#ae-datastore-form dt{font-weight:bold}#ae-datastore-form dd{margin:.4em 0 .3em 1.5em;overflow:auto;zoom:1}#ae-datastore-form dd em{width:4em;float:left}#ae-datastore-form dd.ae-last{margin-bottom:1em}#ae-datastore-explorer-tabs-content{margin-bottom:1em}#ae-datastore-explorer-list .ae-label-row,#ae-datastore-explorer-new .ae-label-row{float:left;padding-top:.2em}#ae-datastore-explorer-list .ae-input-row,#ae-datastore-explorer-list .ae-btn-row,#ae-datastore-explorer-new .ae-input-row,#ae-datastore-explorer-new .ae-btn-row{margin-left:6em}#ae-datastore-explorer-list .ae-btn-row,#ae-datastore-explorer-new .ae-btn-row{margin-bottom:0}.ae-datastore-index-name{font-size:1.2em;font-weight:bold}.ae-table .ae-datastore-index-defs{padding-left:20px}.ae-datastore-index-defs-row{border-top:1px solid #ddd}.ae-datastore-index-defs .ae-unimportant{font-size:.8em}.ae-datastore-index-status{border:1px solid #c0dfbf;background:#f3f7f3;margin:0 25px 0 0;padding:3px}#ae-datastore-index-status-col{width:15%}.ae-datastore-index-status-Building{border-color:#edebcd;background:#fefdec}.ae-datastore-index-status-Deleting{border-color:#ccc;background:#eee}.ae-datastore-index-status-Error{border-color:#ffd3b4;background:#ffeae0}.ae-datastore-pathlink{font-size:.9em}#ae-datastore-stats-top-level-c{padding-bottom:1em;margin-bottom:1em;border-bottom:1px solid 
#e5ecf9}#ae-datastore-stats-top-level{width:100%}#ae-datastore-stats-piecharts-c{margin-bottom:1em}.ae-datastore-stats-piechart-label{font-size:.85em;font-weight:normal;text-align:center;padding:0}#ae-datastore-stats-property-type{width:65%}#ae-datastore-stats-size-all{width:35%}#ae-datastore-stats-property-name{width:60%}#ae-datastore-stats-type{width:10%}#ae-datastore-stats-size-entity{width:30%}#ae-datastore-blob-filter-form{margin-bottom:1em}#ae-datastore-blob-query-filter-label{padding-right:.5em}#ae-datastore-blob-filter-contents{padding-top:.5em}#ae-datastore-blob-date-after,#ae-datastore-blob-date-before{float:left}#ae-datastore-blob-date-after{margin-right:1em}#ae-datastore-blob-order label{font-weight:normal}#ae-datastore-blob-col-check{width:2%}#ae-datastore-blob-col-file{width:45%}#ae-datastore-blob-col-type{width:14%}#ae-datastore-blob-col-size{width:16%}#ae-blobstore-col-date{width:18%}#ae-blob-detail-filename{padding-bottom:0}#ae-blob-detail-filename span{font-weight:normal}#ae-blob-detail-key{font-size:85%}#ae-blob-detail-preview{margin-top:1em}#ae-blob-detail-dl{text-align:right}#ae-domain-admins-list li{margin-bottom:.3em}#ae-domain-admins-list button{margin-left:.5em}#ae-new-app-dialog-c{width:500px}#ae-new-app-dialog-c .g-section{margin-bottom:1em}#dombilling-tt-setup-note{border:1px solid #ccc;padding:1em;background:#efe}#dombilling-tt-setup-error{padding:0.5em;background:#fee}p.light-note{color:#555}.ae-bottom-message{margin-top:1em}#domusage-apptable{border-top:1px solid #ccc;border-left:1px solid #ccc}#domusage-apptable td,#domusage-apptable th{border-right:1px solid #ccc;border-bottom:1px solid #ccc;padding:2px 6px}#domusage-apptable td.users{text-align:right}#domusage-apptable td.cost{text-align:right}#domusage-apptable td.total-label{text-align:right;border-top:2px solid black;padding:1em 0.25em;border-right:0}#domusage-apptable td.total-cost{font-weight:bold;text-align:right;border-top:2px solid black;padding:1em 
0.25em}#domusage-apptable td a{text-decoration:none}#domsettings-form div.ae-radio{margin-left:1.7em}#domsettings-form div.ae-radio input{margin-left:-1.47em;float:left}#ae-logs-c{_margin-right:-2000px;_position:relative;_width:100%;background:#fff}#ae-logs{background-color:#c5d7ef;padding:1px;line-height:1.65}#ae-logs .ae-table-caption{border:0}#ae-logs-c ol,#ae-logs-c li{list-style:none;padding:0;margin:0}#ae-logs-c li li{margin:0 0 0 3px;padding:0 0 0 17px}.ae-log-noerror{padding-left:23px}#ae-logs-form .goog-inline-block{margin-top:0}.ae-logs-reqlog .snippet{margin:.1em}.ae-logs-applog .snippet{color:#666}.ae-logs-severity{display:block;float:left;height:1.2em;width:1.2em;line-height:1.2;text-align:center;text-transform:capitalize;font-weight:bold;border-radius:2px;-moz-border-radius:2px;-webkit-border-radius:2px}.ae-logs-severity-4{background-color:#f22;color:#000}.ae-logs-severity-3{background-color:#f90;color:#000}.ae-logs-severity-2{background-color:#fd0}.ae-logs-severity-1{background-color:#3c0;color:#000}.ae-logs-severity-0{background-color:#09f;color:#000}#ae-logs-legend{margin:1em 0 0 0}#ae-logs-legend ul{list-style:none;margin:0;padding:0}#ae-logs-legend li,#ae-logs-legend strong{float:left;margin:0 1em 0 0}#ae-logs-legend li span{margin-right:.3em}.ae-logs-timestamp{padding:0 5px;font-size:85%}#ae-logs-form-c{margin-bottom:5px;padding-bottom:.5em;padding-left:1em}#ae-logs-form{padding:.3em 0 0}#ae-logs-form .ae-label-row{float:left;padding-top:.2em;margin-right:0.539em}#ae-logs-form .ae-input-row,#ae-logs-form .ae-btn-row{margin-left:4em}#ae-logs-form .ae-btn-row{margin-bottom:0}#ae-logs-requests-c{margin-bottom:.1em}#ae-logs-requests-c input{margin:0}#ae-logs-requests-all-label{margin-right:0.539em}#ae-logs-form-options{margin-top:8px}#ae-logs-tip{margin:.2em 0}#ae-logs-expand{margin-right:.2em}#ae-logs-severity-level-label{margin-top:.3em;display:block}#ae-logs-filter-hint-labels-list{margin:2px 0}#ae-logs-filter-hint-labels-list 
span{position:absolute}#ae-logs-filter-hint-labels-list ul{margin-left:5.5em;padding:0}#ae-logs-filter-hint-labels-list li{float:left;margin-right:.4em;line-height:1.2}.ae-toggle .ae-logs-getdetails,.ae-toggle pre{display:none}.ae-log-expanded .ae-toggle pre{display:block}#ae-logs-c .ae-log .ae-toggle{cursor:default;background:none;padding-left:0}#ae-logs-c .ae-log .ae-toggle h5{cursor:pointer;background-position:0 .55em;background-repeat:no-repeat;padding-left:17px}.ae-log .ae-plus h5{background-image:url(/img/wgt/plus.gif)}.ae-log .ae-minus h5{background-image:url(/img/wgt/minus.gif)}.ae-log{overflow:hidden;background-color:#fff;padding:.3em 0;line-height:1.65;border-bottom:1px solid #c5d7ef}.ae-log .ae-even{background-color:#e9e9e9;border:0}.ae-log h5{font-weight:normal;white-space:nowrap;padding:.4em 0 0 0}.ae-log span,.ae-log strong{margin:0 .3em}.ae-log .ae-logs-snippet{color:#666}.ae-log pre,.ae-logs-expanded{padding:.3em 0 .5em 1.5em;margin:0;font-family:"Courier New"}.ae-log .file{font-weight:bold}.ae-log.ae-log-expanded .file{white-space:pre-wrap;word-wrap:break-word}.ae-logs-app .ae-logs-req{display:none}.ae-logs-req .ae-app,.ae-logs-both .ae-app{padding-left:1em}#ae-dos-blacklist-rejects-table{text-align:left}#ae-dash-quota-percent-col{width:3.5em}.ae-cron-status-ok{color:#008000;font-size:90%;font-weight:bold}.ae-cron-status-error{color:#a03;font-size:90%;font-weight:bold}#ae-cronjobs-table .ae-table td{vertical-align:top}#ae-tasks-table td{vertical-align:top}#ae-tasks-quota{margin:0 0 1em 0}#ae-tasks-quota .ae-dash-quota-bar{width:150px}#ae-tasks-quota #ae-dash-quota-bar-col,#ae-tasks-quota .ae-dash-quota-bar{width:200px}.ae-tasks-paused-row{color:#666;font-style:italic;font-weight:bold}#ae-tasks-quota 
.ae-quota-safety-limit{width:30%}#ae-tasks-table{margin-top:1em}#ae-tasks-queuecontrols{margin-top:1em;margin-bottom:1em}#ae-tasks-delete-col{width:1em}#ae-tasks-eta-col,#ae-tasks-creation-col{width:11em}#ae-tasks-actions-col{width:7em}#ae-tasks-retry-col{width:4em}#ae-tasks-body-col{width:6em}#ae-tasks-headers-col{width:7em}.ae-tasks-hex-column,.ae-tasks-ascii-column{width:16em}#ae-tasks-table .ae-tasks-arrow{text-align:center}
\ No newline at end of file
+html,body,div,h1,h2,h3,h4,h5,h6,p,img,dl,dt,dd,ol,ul,li,table,caption,tbody,tfoot,thead,tr,th,td,form,fieldset,embed,object,applet{margin:0;padding:0;border:0;}body{font-size:62.5%;font-family:Arial,sans-serif;color:#000;background:#fff}a{color:#00c}a:active{color:#f00}a:visited{color:#551a8b}table{border-collapse:collapse;border-width:0;empty-cells:show}ul{padding:0 0 1em 1em}ol{padding:0 0 1em 1.3em}li{line-height:1.5em;padding:0 0 .5em 0}p{padding:0 0 1em 0}h1,h2,h3,h4,h5{padding:0 0 1em 0}h1,h2{font-size:1.3em}h3{font-size:1.1em}h4,h5,table{font-size:1em}sup,sub{font-size:.7em}input,select,textarea,option{font-family:inherit;font-size:inherit}.g-doc,.g-doc-1024,.g-doc-800{font-size:130%}.g-doc{width:100%;text-align:left}.g-section{width:100%;vertical-align:top;display:inline-block}*:first-child+html .g-section{display:block}* html .g-section{overflow:hidden}@-moz-document url-prefix(){.g-section{overflow:hidden}}@-moz-document url-prefix(){.g-section,tt:default{overflow:visible}}.g-section,.g-unit{zoom:1}.g-split .g-unit{text-align:right}.g-split .g-first{text-align:left}.g-doc-1024{width:73.074em;min-width:950px;margin:0 auto;text-align:left}* html .g-doc-1024{width:71.313em}*+html .g-doc-1024{width:71.313em}.g-doc-800{width:57.69em;min-width:750px;margin:0 auto;text-align:left}* html .g-doc-800{width:56.3em}*+html .g-doc-800{width:56.3em}.g-tpl-160 .g-unit,.g-unit .g-tpl-160 .g-unit,.g-unit .g-unit .g-tpl-160 .g-unit,.g-unit .g-unit .g-unit .g-tpl-160 .g-unit{margin:0 0 0 160px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-160 .g-first,.g-unit .g-unit .g-tpl-160 .g-first,.g-unit .g-tpl-160 .g-first,.g-tpl-160 .g-first{margin:0;width:160px;float:left}.g-tpl-160-alt .g-unit,.g-unit .g-tpl-160-alt .g-unit,.g-unit .g-unit .g-tpl-160-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-160-alt .g-unit{margin:0 160px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-160-alt .g-first,.g-unit .g-unit .g-tpl-160-alt .g-first,.g-unit .g-tpl-160-alt 
.g-first,.g-tpl-160-alt .g-first{margin:0;width:160px;float:right}.g-tpl-180 .g-unit,.g-unit .g-tpl-180 .g-unit,.g-unit .g-unit .g-tpl-180 .g-unit,.g-unit .g-unit .g-unit .g-tpl-180 .g-unit{margin:0 0 0 180px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-180 .g-first,.g-unit .g-unit .g-tpl-180 .g-first,.g-unit .g-tpl-180 .g-first,.g-tpl-180 .g-first{margin:0;width:180px;float:left}.g-tpl-180-alt .g-unit,.g-unit .g-tpl-180-alt .g-unit,.g-unit .g-unit .g-tpl-180-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-180-alt .g-unit{margin:0 180px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-180-alt .g-first,.g-unit .g-unit .g-tpl-180-alt .g-first,.g-unit .g-tpl-180-alt .g-first,.g-tpl-180-alt .g-first{margin:0;width:180px;float:right}.g-tpl-300 .g-unit,.g-unit .g-tpl-300 .g-unit,.g-unit .g-unit .g-tpl-300 .g-unit,.g-unit .g-unit .g-unit .g-tpl-300 .g-unit{margin:0 0 0 300px;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-300 .g-first,.g-unit .g-unit .g-tpl-300 .g-first,.g-unit .g-tpl-300 .g-first,.g-tpl-300 .g-first{margin:0;width:300px;float:left}.g-tpl-300-alt .g-unit,.g-unit .g-tpl-300-alt .g-unit,.g-unit .g-unit .g-tpl-300-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-300-alt .g-unit{margin:0 300px 0 0;width:auto;float:none}.g-unit .g-unit .g-unit .g-tpl-300-alt .g-first,.g-unit .g-unit .g-tpl-300-alt .g-first,.g-unit .g-tpl-300-alt .g-first,.g-tpl-300-alt .g-first{margin:0;width:300px;float:right}.g-tpl-25-75 .g-unit,.g-unit .g-tpl-25-75 .g-unit,.g-unit .g-unit .g-tpl-25-75 .g-unit,.g-unit .g-unit .g-unit .g-tpl-25-75 .g-unit{width:74.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-25-75 .g-first,.g-unit .g-unit .g-tpl-25-75 .g-first,.g-unit .g-tpl-25-75 .g-first,.g-tpl-25-75 .g-first{width:24.999%;float:left;margin:0}.g-tpl-25-75-alt .g-unit,.g-unit .g-tpl-25-75-alt .g-unit,.g-unit .g-unit .g-tpl-25-75-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-25-75-alt .g-unit{width:24.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-25-75-alt 
.g-first,.g-unit .g-unit .g-tpl-25-75-alt .g-first,.g-unit .g-tpl-25-75-alt .g-first,.g-tpl-25-75-alt .g-first{width:74.999%;float:right;margin:0}.g-tpl-75-25 .g-unit,.g-unit .g-tpl-75-25 .g-unit,.g-unit .g-unit .g-tpl-75-25 .g-unit,.g-unit .g-unit .g-unit .g-tpl-75-25 .g-unit{width:24.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-75-25 .g-first,.g-unit .g-unit .g-tpl-75-25 .g-first,.g-unit .g-tpl-75-25 .g-first,.g-tpl-75-25 .g-first{width:74.999%;float:left;margin:0}.g-tpl-75-25-alt .g-unit,.g-unit .g-tpl-75-25-alt .g-unit,.g-unit .g-unit .g-tpl-75-25-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-75-25-alt .g-unit{width:74.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-75-25-alt .g-first,.g-unit .g-unit .g-tpl-75-25-alt .g-first,.g-unit .g-tpl-75-25-alt .g-first,.g-tpl-75-25-alt .g-first{width:24.999%;float:right;margin:0}.g-tpl-33-67 .g-unit,.g-unit .g-tpl-33-67 .g-unit,.g-unit .g-unit .g-tpl-33-67 .g-unit,.g-unit .g-unit .g-unit .g-tpl-33-67 .g-unit{width:66.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-33-67 .g-first,.g-unit .g-unit .g-tpl-33-67 .g-first,.g-unit .g-tpl-33-67 .g-first,.g-tpl-33-67 .g-first{width:32.999%;float:left;margin:0}.g-tpl-33-67-alt .g-unit,.g-unit .g-tpl-33-67-alt .g-unit,.g-unit .g-unit .g-tpl-33-67-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-33-67-alt .g-unit{width:32.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-33-67-alt .g-first,.g-unit .g-unit .g-tpl-33-67-alt .g-first,.g-unit .g-tpl-33-67-alt .g-first,.g-tpl-33-67-alt .g-first{width:66.999%;float:right;margin:0}.g-tpl-67-33 .g-unit,.g-unit .g-tpl-67-33 .g-unit,.g-unit .g-unit .g-tpl-67-33 .g-unit,.g-unit .g-unit .g-unit .g-tpl-67-33 .g-unit{width:32.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-67-33 .g-first,.g-unit .g-unit .g-tpl-67-33 .g-first,.g-unit .g-tpl-67-33 .g-first,.g-tpl-67-33 .g-first{width:66.999%;float:left;margin:0}.g-tpl-67-33-alt .g-unit,.g-unit .g-tpl-67-33-alt .g-unit,.g-unit .g-unit .g-tpl-67-33-alt 
.g-unit,.g-unit .g-unit .g-unit .g-tpl-67-33-alt .g-unit{width:66.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-67-33-alt .g-first,.g-unit .g-unit .g-tpl-67-33-alt .g-first,.g-unit .g-tpl-67-33-alt .g-first,.g-tpl-67-33-alt .g-first{width:32.999%;float:right;margin:0}.g-tpl-50-50 .g-unit,.g-unit .g-tpl-50-50 .g-unit,.g-unit .g-unit .g-tpl-50-50 .g-unit,.g-unit .g-unit .g-unit .g-tpl-50-50 .g-unit{width:49.999%;float:right;margin:0}.g-unit .g-unit .g-unit .g-tpl-50-50 .g-first,.g-unit .g-unit .g-tpl-50-50 .g-first,.g-unit .g-tpl-50-50 .g-first,.g-tpl-50-50 .g-first{width:49.999%;float:left;margin:0}.g-tpl-50-50-alt .g-unit,.g-unit .g-tpl-50-50-alt .g-unit,.g-unit .g-unit .g-tpl-50-50-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-50-50-alt .g-unit{width:49.999%;float:left;margin:0}.g-unit .g-unit .g-unit .g-tpl-50-50-alt .g-first,.g-unit .g-unit .g-tpl-50-50-alt .g-first,.g-unit .g-tpl-50-50-alt .g-first,.g-tpl-50-50-alt .g-first{width:49.999%;float:right;margin:0}.g-tpl-nest{width:auto}.g-tpl-nest .g-section{display:inline}.g-tpl-nest .g-unit,.g-unit .g-tpl-nest .g-unit,.g-unit .g-unit .g-tpl-nest .g-unit,.g-unit .g-unit .g-unit .g-tpl-nest .g-unit{float:left;width:auto;margin:0}.g-tpl-nest-alt .g-unit,.g-unit .g-tpl-nest-alt .g-unit,.g-unit .g-unit .g-tpl-nest-alt .g-unit,.g-unit .g-unit .g-unit .g-tpl-nest-alt .g-unit{float:right;width:auto;margin:0}.goog-button{border-width:1px;border-style:solid;border-color:#bbb #999 #999 #bbb;border-radius:2px;-webkit-border-radius:2px;-moz-border-radius:2px;font:normal normal normal 13px/13px Arial,sans-serif;color:#000;text-align:middle;text-decoration:none;text-shadow:0 1px 1px rgba(255,255,255,1);background:#eee;background-image:-webkit-gradient(linear,0 0,0 
100%,from(#fff),to(#ddd));background:-moz-linear-gradient(top,#fff,#ddd);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#dddddd',StartColorstr='#ffffff',GradientType=0);cursor:pointer;margin:0;display:inline;display:-moz-inline-box;display:inline-block;*overflow:visible;padding:4px 8px 5px}a.goog-button,span.goog-button,div.goog-button{padding:4px 8px 5px}.goog-button:visited{color:#000}.goog-button{*display:inline}.goog-button:focus,.goog-button:hover{border-color:#000}.goog-button:active,.goog-button-active{color:#000;background-color:#bbb;border-color:#999 #bbb #bbb #999;background-image:-webkit-gradient(linear,0 0,0 100%,from(#ddd),to(#fff));background-image:-moz-linear-gradient(top,#ddd,#fff);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#ffffff',StartColorstr='#dddddd',GradientType=0)}.goog-button[disabled],.goog-button[disabled]:active,.goog-button[disabled]:hover{color:#666;border-color:#ddd;background-color:#f3f3f3;background-image:none;text-shadow:none;cursor:auto}.goog-button{padding:5px 8px 4px\9}.goog-button{*padding:4px 7px 2px}html>body input.goog-button,x:-moz-any-link,x:default,html>body button.goog-button,x:-moz-any-link,x:default{padding-top:3px;padding-bottom:2px}a.goog-button,x:-moz-any-link,x:default,span.goog-button,x:-moz-any-link,x:default,div.goog-button,x:-moz-any-link,x:default{padding:4px 8px 5px}.goog-button-fixed{padding-left:0!important;padding-right:0!important;width:100%}button.goog-button-icon-c{padding-top:1px;padding-bottom:1px}button.goog-button-icon-c{padding-top:3px\9;padding-bottom:2px\9}button.goog-button-icon-c{*padding-top:0;*padding-bottom:0}html>body button.goog-button-icon-c,x:-moz-any-link,x:default{padding-top:1px;padding-bottom:1px}.goog-button-icon{display:block;margin:0 auto;height:18px;width:18px}html>body .goog-inline-block{display:-moz-inline-box;display:inline-block;}.goog-inline-block{position:relative;display:inline-block}* html 
.goog-inline-block{display:inline}*:first-child+html .goog-inline-block{display:inline}.goog-custom-button{margin:0 2px 2px;border:0;padding:0;font:normal Tahoma,Arial,sans-serif;color:#000;text-decoration:none;list-style:none;vertical-align:middle;cursor:pointer;outline:none;background:#eee;background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#ddd));background:-moz-linear-gradient(top,#fff,#ddd);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#dddddd',StartColorstr='#ffffff',GradientType=0)}.goog-custom-button-outer-box,.goog-custom-button-inner-box{border-style:solid;border-color:#bbb #999 #999 #bbb;vertical-align:top}.goog-custom-button-outer-box{margin:0;border-width:1px 0;padding:0}.goog-custom-button-inner-box{margin:0 -1px;border-width:0 1px;padding:3px 4px}* html .goog-custom-button-inner-box{left:-1px}* html .goog-custom-button-rtl .goog-custom-button-outer-box{left:-1px}* html .goog-custom-button-rtl .goog-custom-button-inner-box{left:0}*:first-child+html .goog-custom-button-inner-box{left:-1px}*:first-child+html .goog-custom-button-collapse-right .goog-custom-button-inner-box{border-left-width:2px}*:first-child+html .goog-custom-button-collapse-left .goog-custom-button-inner-box{border-right-width:2px}*:first-child+html .goog-custom-button-collapse-right.goog-custom-button-collapse-left .goog-custom-button-inner-box{border-width:0 1px}*:first-child+html .goog-custom-button-rtl .goog-custom-button-inner-box{left:1px}::root .goog-custom-button,::root .goog-custom-button-outer-box{line-height:0}::root .goog-custom-button-inner-box{line-height:normal}.goog-custom-button-disabled{background-image:none!important;opacity:0.4;-moz-opacity:0.4;filter:alpha(opacity=40)}.goog-custom-button-disabled .goog-custom-button-outer-box,.goog-custom-button-disabled .goog-custom-button-inner-box{color:#333!important;border-color:#999!important}* html .goog-custom-button-disabled{margin:2px 1px!important;padding:0 1px!important}*:first-child+html 
.goog-custom-button-disabled{margin:2px 1px!important;padding:0 1px!important}.goog-custom-button-hover .goog-custom-button-outer-box,.goog-custom-button-hover .goog-custom-button-inner-box{border-color:#000!important;}.goog-custom-button-active,.goog-custom-button-checked{background-color:#bbb;background-position:bottom left;background-image:-webkit-gradient(linear,0 0,0 100%,from(#ddd),to(#fff));background:-moz-linear-gradient(top,#ddd,#fff);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#ffffff',StartColorstr='#dddddd',GradientType=0)}.goog-custom-button-focused .goog-custom-button-outer-box,.goog-custom-button-focused .goog-custom-button-inner-box,.goog-custom-button-focused.goog-custom-button-collapse-left .goog-custom-button-inner-box,.goog-custom-button-focused.goog-custom-button-collapse-left.goog-custom-button-checked .goog-custom-button-inner-box{border-color:#000}.goog-custom-button-collapse-right,.goog-custom-button-collapse-right .goog-custom-button-outer-box,.goog-custom-button-collapse-right .goog-custom-button-inner-box{margin-right:0}.goog-custom-button-collapse-left,.goog-custom-button-collapse-left .goog-custom-button-outer-box,.goog-custom-button-collapse-left .goog-custom-button-inner-box{margin-left:0}.goog-custom-button-collapse-left .goog-custom-button-inner-box{border-left:1px solid #fff}.goog-custom-button-collapse-left.goog-custom-button-checked .goog-custom-button-inner-box{border-left:1px solid #ddd}* html .goog-custom-button-collapse-left .goog-custom-button-inner-box{left:0}*:first-child+html .goog-custom-button-collapse-left .goog-custom-button-inner-box{left:0}.goog-date-picker th,.goog-date-picker td{font-family:arial,sans-serif;text-align:center}.goog-date-picker th{font-size:.9em;font-weight:bold;color:#666667;background-color:#c3d9ff}.goog-date-picker td{vertical-align:middle;padding:2px 3px}.goog-date-picker{-moz-user-focus:normal;-moz-user-select:none;position:absolute;border:1px solid 
gray;float:left;font-family:arial,sans-serif;padding-left:1px;background:white}.goog-date-picker-menu{position:absolute;background:threedface;border:1px solid gray;-moz-user-focus:normal}.goog-date-picker-menu ul{list-style:none;margin:0;padding:0}.goog-date-picker-menu ul li{cursor:default}.goog-date-picker-menu-selected{background-color:#aaccee}.goog-date-picker td div{float:left}.goog-date-picker button{padding:0;margin:1px;border:1px outset gray}.goog-date-picker-week{padding:1px 3px}.goog-date-picker-wday{padding:1px 3px}.goog-date-picker-today-cont{text-align:left!important}.goog-date-picker-none-cont{text-align:right!important}.goog-date-picker-head td{text-align:center}.goog-date-picker-month{width:12ex}.goog-date-picker-year{width:6ex}.goog-date-picker table{border-collapse:collapse}.goog-date-picker-selected{background-color:#aaccee!important;color:blue!important}.goog-date-picker-today{font-weight:bold!important}.goog-date-picker-other-month{-moz-opacity:0.3;filter:Alpha(Opacity=30)}.sat,.sun{background:#eee}#button1,#button2{display:block;width:60px;text-align:center;margin:10px;padding:10px;font:normal .8em arial,sans-serif;border:1px solid #000}.goog-menu{position:absolute;color:#000;border:1px solid #b5b6b5;background-color:#f3f3f7;cursor:default;font:normal small arial,helvetica,sans-serif;margin:0;padding:0;outline:none}.goog-menuitem{padding:2px 5px;margin:0;list-style:none}.goog-menuitem-highlight{background-color:#4279a5;color:#fff}.goog-menuitem-disabled{color:#999}.goog-option{padding-left:15px!important}.goog-option-selected{background-image:url(/img/check.gif);background-position:4px 50%;background-repeat:no-repeat}.goog-menuseparator{position:relative;margin:2px 0;border-top:1px solid #999;padding:0;outline:none}.goog-submenu{position:relative}.goog-submenu-arrow{position:absolute;display:block;width:11px;height:11px;right:3px;top:4px;background-image:url(/img/menu-arrows.gif);background-repeat:no-repeat;background-position:0 
0;font-size:1px}.goog-menuitem-highlight .goog-submenu-arrow{background-position:0 -11px}.goog-menuitem-disabled .goog-submenu-arrow{display:none}.goog-menu-filter{margin:2px;border:1px solid silver;background:white;overflow:hidden}.goog-menu-filter div{color:gray;position:absolute;padding:1px}.goog-menu-filter input{margin:0;border:0;background:transparent;width:100%}.goog-menuitem-partially-checked{background-image:url(/img/check-outline.gif);background-position:4px 50%;background-repeat:no-repeat}.goog-menuitem-fully-checked{background-image:url(/img/check.gif);background-position:4px 50%;background-repeat:no-repeat}.goog-menu-button{margin:0 2px 2px 2px;border:0;padding:0;font:normal Tahoma,Arial,sans-serif;color:#000;background:#ddd url("/img/button-bg.gif") repeat-x top left;text-decoration:none;list-style:none;vertical-align:middle;cursor:pointer;outline:none}.goog-menu-button-outer-box,.goog-menu-button-inner-box{border-style:solid;border-color:#aaa;vertical-align:middle}.goog-menu-button-outer-box{margin:0;border-width:1px 0;padding:0}.goog-menu-button-inner-box{margin:0 -1px;border-width:0 1px;padding:0 4px 2px 4px}* html .goog-menu-button-inner-box{left:-1px}* html .goog-menu-button-rtl .goog-menu-button-outer-box{left:-1px}* html .goog-menu-button-rtl .goog-menu-button-inner-box{left:0}*:first-child+html .goog-menu-button-inner-box{left:-1px}*:first-child+html .goog-menu-button-rtl .goog-menu-button-inner-box{left:1px}::root .goog-menu-button,::root .goog-menu-button-outer-box,::root .goog-menu-button-inner-box{line-height:0}::root .goog-menu-button-caption,::root .goog-menu-button-dropdown{line-height:normal}.goog-menu-button-disabled{background-image:none!important;opacity:0.4;-moz-opacity:0.4;filter:alpha(opacity=40)}.goog-menu-button-disabled .goog-menu-button-outer-box,.goog-menu-button-disabled .goog-menu-button-inner-box,.goog-menu-button-disabled .goog-menu-button-caption,.goog-menu-button-disabled 
.goog-menu-button-dropdown{color:#333!important;border-color:#999!important}* html .goog-menu-button-disabled{margin:2px 1px!important;padding:0 1px!important}*:first-child+html .goog-menu-button-disabled{margin:2px 1px!important;padding:0 1px!important}.goog-menu-button-hover .goog-menu-button-outer-box,.goog-menu-button-hover .goog-menu-button-inner-box{border-color:#9cf #69e #69e #7af!important;}.goog-menu-button-active,.goog-menu-button-open{background-color:#bbb;background-position:bottom left}.goog-menu-button-focused .goog-menu-button-outer-box,.goog-menu-button-focused .goog-menu-button-inner-box{border-color:#3366cc}.goog-menu-button-caption{padding:0 4px 0 0;vertical-align:middle}.goog-menu-button-rtl .goog-menu-button-caption{padding:0 0 0 4px}.goog-menu-button-dropdown{width:7px;background:url(/img/toolbar_icons.gif) no-repeat -176px;vertical-align:middle}.goog-flat-menu-button{margin:0 2px;padding:1px 4px;font:normal 95% Tahoma,Arial,sans-serif;color:#333;text-decoration:none;list-style:none;vertical-align:middle;cursor:pointer;outline:none;-moz-outline:none;border-width:1px;border-style:solid;border-color:#c9c9c9;background-color:#fff}.goog-flat-menu-button-disabled *{color:#999;border-color:#ccc;cursor:default}.goog-flat-menu-button-hover,.goog-flat-menu-button-hover{border-color:#9cf #69e #69e #7af!important;}.goog-flat-menu-button-active{background-color:#bbb;background-position:bottom left}.goog-flat-menu-button-focused{border-color:#3366cc}.goog-flat-menu-button-caption{padding-right:10px;vertical-align:middle}.goog-flat-menu-button-dropdown{width:7px;background:url(/img/toolbar_icons.gif) no-repeat -176px;vertical-align:middle}h1{font-size:1.8em}.g-doc{width:auto;margin:0 10px}.g-doc-1024{margin-left:10px}#ae-logo{background:url(//www.google.com/images/logos/app_engine_logo_sm.gif) 0 0 no-repeat;display:block;width:178px;height:30px;margin:4px 0 0 0}.ae-ir 
span{position:absolute;display:block;width:0;height:0;overflow:hidden}.ae-noscript{position:absolute;left:-5000px}#ae-lhs-nav{border-right:3px solid #e5ecf9}.ae-notification{margin-bottom:.6em;text-align:center}.ae-notification strong{display:block;width:55%;margin:0 auto;text-align:center;padding:.6em;background-color:#fff1a8;font-weight:bold}.ae-alert{font-weight:bold;background:url(/img/icn/warning.png) no-repeat;margin-bottom:.5em;padding-left:1.8em}.ae-info{background:url(/img/icn/icn-info.gif) no-repeat;margin-bottom:.5em;padding-left:1.8em}.ae-promo{padding:.5em .8em;margin:.6em 0;background-color:#fffbe8;border:1px solid #fff1a9;text-align:left}.ae-promo strong{position:relative;top:.3em}.ae-alert-text,.ae-warning-text{background-color:transparent;background-position:right 1px;padding:0 18px 0 0}.ae-alert-text{color:#c00}.ae-warning-text{color:#f90}.ae-alert-c span{display:inline-block}.ae-message{border:1px solid #e5ecf9;background-color:#f6f9ff;margin-bottom:1em;padding:.5em}.ae-errorbox{border:1px solid #f00;background-color:#fee;margin-bottom:1em;padding:1em}#bd .ae-errorbox ul{padding-bottom:0}.ae-form dt{font-weight:bold}.ae-form dt em,.ae-field-hint{margin-top:.2em;color:#666667;font-size:.85em}.ae-field-yyyymmdd,.ae-field-hhmmss{width:6em}.ae-field-hint-hhmmss{margin-left:2.3em}.ae-form label{display:block;margin:0 0 .2em 0;font-weight:bold}.ae-radio{margin-bottom:.3em}.ae-radio label{display:inline}.ae-form dd,.ae-input-row{margin-bottom:.6em}.ae-input-row-group{border:1px solid #fff1a9;background:#fffbe8;padding:8px}.ae-btn-row{margin-top:1.4em;margin-bottom:1em}.ae-btn-row-note{padding:5px 0 6px 0}.ae-btn-row-note span{padding-left:18px;padding-right:.5em;background:transparent url(/img/icn/icn-info.gif) 0 0 no-repeat}.ae-btn-primary{font-weight:bold}form .ae-cancel{margin-left:.5em}.ae-submit-inline{margin-left:.8em}.ae-radio-bullet{width:20px;float:left}.ae-label-hanging-indent{margin-left:5px}.ae-divider{margin:0 .6em 0 
.5em}.ae-nowrap{white-space:nowrap}.ae-pre-wrap{white-space:pre-wrap;white-space:-moz-pre-wrap;white-space:-pre-wrap;white-space:-o-pre-wrap;word-wrap:break-word;_white-space:pre;}wbr:after{content:"\00200B"}a button{text-decoration:none}.ae-alert ul{margin-bottom:.75em;margin-top:.25em;line-height:1.5em}.ae-alert h4{color:#000;font-weight:bold;padding:0 0 .5em}.ae-form-simple-list{list-style-type:none;padding:0;margin-bottom:1em}.ae-form-simple-list li{padding:.3em 0 .5em .5em;border-bottom:1px solid #c3d9ff}div.ae-datastore-index-to-delete,div.ae-datastore-index-to-build{color:#aaa}#hd p{padding:0}#hd li{display:inline}ul{padding:0 0 1em 1.2em}#ae-userinfo{text-align:right;white-space:nowrap;}#ae-userinfo ul{padding-bottom:0;padding-top:5px}#ae-appbar-lrg{margin:0 0 1.25em 0;padding:.25em .5em;background-color:#e5ecf9;border-top:1px solid #36c}#ae-appbar-lrg h1{font-size:1.2em;padding:0}#ae-appbar-lrg h1 span{font-size:80%;font-weight:normal}#ae-appbar-lrg form{display:inline;padding-right:.1em;margin-right:.5em}#ae-appbar-lrg strong{white-space:nowrap}#ae-appbar-sml{margin:0 0 1.25em 0;height:8px;padding:0 .5em;background:#e5ecf9}.ae-rounded-sml{border-radius:3px;-moz-border-radius:3px;-webkit-border-radius:3px}#ae-appbar-lrg a{margin-top:.3em}a.ae-ext-link,a span.ae-ext-link{background:url(/img/icn/icn-open-in-new-window.png) no-repeat right;padding-right:18px;margin-right:8px}.ae-no-pad{padding-left:1em}.ae-message h4{margin-bottom:.3em;padding-bottom:0}#ft{text-align:center;margin:2.5em 0 1em;padding-top:.5em;border-top:2px solid #c3d9ff}#bd h3{font-weight:bold;font-size:1.4em}#bd h3 .ae-apps-switch{font-weight:normal;font-size:.7em;margin-left:2em}#bd p{padding:0 0 1em 0}#ae-content{padding-left:1em}.ae-unimportant{color:#666}.ae-new-usr td{border-top:1px solid #ccccce;background-color:#ffe}.ae-error-td td{border:2px solid #f00;background-color:#fee}.ae-delete{cursor:pointer;border:none;background:transparent;}.ae-btn-large{background:#039 
url(/img/icn/button_back.png) repeat-x;color:#fff;font-weight:bold;font-size:1.2em;padding:.5em;border:2px outset #000;cursor:pointer}.ae-breadcrumb{margin:0 0 1em}.ae-disabled,a.ae-disabled,a.ae-disabled:hover,a.ae-disabled:active{color:#666!important;text-decoration:none!important;cursor:default!important;opacity:.4!important;-moz-opacity:.4!important;filter:alpha(opacity=40)!important}input.ae-readonly{border:2px solid transparent;border-left:0;background-color:transparent}span.ae-text-input-clone{padding:5px 5px 5px 0}.ae-loading{opacity:.4;-moz-opacity:.4;filter:alpha(opacity=40)}.ae-tip{margin:1em 0;background:url(/img/tip.png) top left no-repeat;padding:2px 0 0 25px}sup.ae-new-sup{color:red}.ae-action{color:#00c;cursor:pointer;text-decoration:underline}.ae-toggle{padding-left:16px;background-position:left center;background-repeat:no-repeat;cursor:pointer}.ae-minus{background-image:url(/img/wgt/minus.gif)}.ae-plus{background-image:url(/img/wgt/plus.gif)}.ae-print{background-image:url(/img/print.gif);padding-left:19px}.ae-currency,.ae-table thead th.ae-currency{text-align:right;white-space:nowrap}#ae-loading{font-size:1.2em;position:absolute;text-align:center;top:0;width:100%}#ae-loading div{margin:0 auto;background:#fff1a9;width:5em;font-weight:bold;padding:4px 10px;-moz-border-radius-bottomleft:3px;-moz-border-radius-bottomright:3px;-webkit-border-radius-bottomleft:3px;-webkit-border-radius-bottomright:3px}.ae-occlude{filter:alpha(opacity=0);position:absolute}.g-tpl-66-34 .g-unit,.g-unit .g-tpl-66-34 .g-unit,.g-unit .g-unit .g-tpl-66-34 .g-unit,.g-unit .g-unit .g-unit .g-tpl-66-34 .g-unit{display:inline;margin:0;width:33.999%;float:right}.g-unit .g-unit .g-unit .g-tpl-66-34 .g-first,.g-unit .g-unit .g-tpl-66-34 .g-first,.g-unit .g-tpl-66-34 .g-first,.g-tpl-66-34 .g-first{display:inline;margin:0;width:65.999%;float:left}.ae-ie6-c{_margin-right:-2000px;_position:relative;_width:100%;background:#fff}h2.ae-section-header{background:#e5ecf9;padding:.2em 
.4em;margin-bottom:.5em}.ae-field-span{padding:3px 0}select{font:13px/13px Arial,sans-serif;color:#000;border-width:1px;border-style:solid;border-color:#bbb #999 #999 #bbb;-webkit-border-radius:2px;-moz-border-radius:2px;background:#eee;background:-moz-linear-gradient(top,#fff,#ddd);filter:progid:DXImageTransform.Microsoft.Gradient(EndColorstr='#dddddd',StartColorstr='#ffffff',GradientType=0);cursor:pointer;padding:2px 1px;margin:0}select:hover{border-color:#000}select[disabled],select[disabled]:active{color:#666;border-color:#ddd;background-color:#f3f3f3;background-image:none;text-shadow:none;cursor:auto}.ae-table-plain{border-collapse:collapse;width:100%}.ae-table{border:1px solid #c5d7ef;border-collapse:collapse;width:100%}#bd h2.ae-table-title{background:#e5ecf9;margin:0;color:#000;font-size:1em;padding:3px 0 3px 5px;border-left:1px solid #c5d7ef;border-right:1px solid #c5d7ef;border-top:1px solid #c5d7ef}.ae-table-caption,.ae-table caption{border:1px solid #c5d7ef;background:#e5ecf9;-moz-margin-start:-1px}.ae-table caption{padding:3px 5px;text-align:left}.ae-table th,.ae-table td{background-color:#fff;padding:.35em 1em .25em .35em;margin:0}.ae-table thead th{font-weight:bold;text-align:left;background:#c5d7ef;vertical-align:bottom}.ae-table thead th .ae-no-bold{font-weight:normal}.ae-table tfoot tr td{border-top:1px solid #c5d7ef;background-color:#e5ecf9}.ae-table td{border-top:1px solid #c5d7ef;border-bottom:1px solid #c5d7ef}.ae-even>td,.ae-even th,.ae-even-top td,.ae-even-tween td,.ae-even-bottom td,ol.ae-even{background-color:#e9e9e9;border-top:1px solid #c5d7ef;border-bottom:1px solid #c5d7ef}.ae-even-top td{border-bottom:0}.ae-even-bottom td{border-top:0}.ae-even-tween td{border:0}.ae-table .ae-tween td{border:0}.ae-table .ae-tween-top td{border-bottom:0}.ae-table .ae-tween-bottom td{border-top:0}#bd .ae-table .cbc{width:1.5em;padding-right:0}.ae-table #ae-live td{background-color:#ffeac0}.ae-table-fixed{table-layout:fixed}.ae-table-fixed 
td,.ae-table-nowrap{overflow:hidden;white-space:nowrap}.ae-paginate strong{margin:0 .5em}tfoot .ae-paginate{text-align:right}.ae-table-caption .ae-paginate,.ae-table-caption .ae-orderby{padding:2px 5px}.modal-dialog{background:#c1d9ff;border:1px solid #3a5774;color:#000;padding:4px;position:absolute;font-size:1.3em;-moz-box-shadow:0 1px 4px #333;-webkit-box-shadow:0 1px 4px #333;box-shadow:0 1px 4px #333}.modal-dialog a,.modal-dialog a:link,.modal-dialog a:visited{color:#06c;cursor:pointer}.modal-dialog-bg{background:#666;left:0;position:absolute;top:0}.modal-dialog-title{background:#e0edfe;color:#000;cursor:pointer;font-size:120%;font-weight:bold;padding:8px 15px 8px 8px;position:relative;_zoom:1;}.modal-dialog-title-close{background:#e0edfe url(https://ssl.gstatic.com/editor/editortoolbar.png) no-repeat -528px 0;cursor:default;height:15px;position:absolute;right:10px;top:8px;width:15px;vertical-align:middle}.modal-dialog-buttons,.modal-dialog-content{background-color:#fff;padding:8px}.modal-dialog-buttons button{margin-right:.75em}.goog-buttonset-default{font-weight:bold}.goog-tab{position:relative;border:1px solid #8ac;padding:4px 9px;color:#000;background:#e5ecf9;border-top-left-radius:2px;border-top-right-radius:2px;-moz-border-radius-topleft:2px;-webkit-border-top-left-radius:2px;-moz-border-radius-topright:2px;-webkit-border-top-right-radius:2px}.goog-tab-bar-top .goog-tab{margin:1px 4px 0 0;border-bottom:0;float:left}.goog-tab-bar-bottom .goog-tab{margin:0 4px 1px 0;border-top:0;float:left}.goog-tab-bar-start .goog-tab{margin:0 0 4px 1px;border-right:0}.goog-tab-bar-end .goog-tab{margin:0 1px 4px 0;border-left:0}.goog-tab-hover{text-decoration:underline;cursor:pointer}.goog-tab-disabled{color:#fff;background:#ccc;border-color:#ccc}.goog-tab-selected{background:#fff!important;color:black;font-weight:bold}.goog-tab-bar-top .goog-tab-selected{top:1px;margin-top:0;padding-bottom:5px}.goog-tab-bar-bottom 
.goog-tab-selected{top:-1px;margin-bottom:0;padding-top:5px}.goog-tab-bar-start .goog-tab-selected{left:1px;margin-left:0;padding-right:9px}.goog-tab-bar-end .goog-tab-selected{left:-1px;margin-right:0;padding-left:9px}.goog-tab-content{padding:.1em .8em .8em .8em;border:1px solid #8ac;border-top:none}.goog-tab-bar{position:relative;margin:0 0 0 5px;border:0;padding:0;list-style:none;cursor:default;outline:none}.goog-tab-bar-clear{border-top:1px solid #8ac;clear:both;height:0;overflow:hidden}.goog-tab-bar-start{float:left}.goog-tab-bar-end{float:right}* html .goog-tab-bar-start{margin-right:-3px}* html .goog-tab-bar-end{margin-left:-3px}#ae-nav ul{list-style-type:none;margin:0;padding:1em 0}#ae-nav ul li{padding-left:.5em}#ae-nav .ae-nav-selected{color:#000;display:block;font-weight:bold;background-color:#e5ecf9;margin-right:-1px;border-top-left-radius:4px;-moz-border-radius-topleft:4px;-webkit-border-top-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomleft:4px;-webkit-border-bottom-left-radius:4px}#ae-nav .ae-nav-bold{font-weight:bold}#ae-nav ul li span.ae-nav-disabled{color:#666}#ae-nav ul ul{margin:0;padding:0 0 0 .5em}#ae-nav ul ul li{padding-left:.5em}#ae-nav ul li a,#ae-nav ul li span,#ae-nav ul ul li a{padding-left:.5em}#ae-nav li a:link,#ae-nav li a:visited{color:#00c}.ae-nav-group{padding:.5em;margin:0 .75em 0 0;background-color:#fffbe8;border:1px solid #fff1a9}.ae-nav-group h4{font-weight:bold;padding:auto auto .5em .5em;padding-left:.4em;margin-bottom:.5em;padding-bottom:0}.ae-nav-group ul{margin:0 0 .5em 0;padding:0 0 0 1.3em;list-style-type:none}.ae-nav-group ul li{padding-bottom:.5em}.ae-nav-group li a:link,.ae-nav-group li a:visited{color:#00c}.ae-nav-group li a:hover{color:#00c}@media 
print{body{font-size:13px;width:8.5in;background:#fff}table,.ae-table-fixed{table-layout:auto/* was "automatic": invalid per CSS 2.1 (auto|fixed|inherit), so browsers ignored it and the print view never undid .ae-table-fixed{table-layout:fixed} */}tr{display:table-row!important}.g-doc-1024{width:8.5in}#ae-appbar-lrg,.ae-table-caption,.ae-table-nowrap,.ae-nowrap,th,td{overflow:visible!important;white-space:normal!important;background:#fff!important}.ae-print,.ae-toggle{display:none}#ae-lhs-nav-c{display:none}#ae-content{margin:0;padding:0}.goog-zippy-collapsed,.goog-zippy-expanded{background:none!important;padding:0!important}}#ae-admin-dev-table{margin:0 0 1em 0}.ae-admin-dev-tip,.ae-admin-dev-tip.ae-tip{margin:-0.31em 0 2.77em}#ae-sms-countryselect{margin-right:.5em}#ae-admin-enable-form{margin-bottom:1em}#ae-admin-services-c{margin-top:2em}#ae-admin-services{padding:0 0 0 3em;margin-bottom:1em;font-weight:bold}#ae-admin-logs-table-c{_margin-right:-2000px;_position:relative;_width:100%;background:#fff}#ae-admin-logs-table{margin:0;padding:0}#ae-admin-logs-filters{padding:3px 0 3px 5px}#ae-admin-logs-pagination{padding:6px 5px 0 0;text-align:right;width:45%}#ae-admin-logs-pagination span.ae-disabled{color:#666;background-color:transparent}#ae-admin-logs-table td{white-space:nowrap}#ae-storage-content div.ae-alert{padding-bottom:5px}#ae-admin-performance-form input[type=text]{width:2em}.ae-admin-performance-value{font-weight:normal}.ae-admin-performance-static-value{color:#666}.goog-slider-horizontal,.goog-twothumbslider-horizontal{position:relative;width:502px;height:7px;display:block;outline:0;margin:1.0em 0 0.9em 3em}.ae-slider-rail:before{position:relative;top:-0.462em;float:left;content:'Min';margin:0 0 0 -3em;color:#999}.ae-slider-rail{position:absolute;background-color:#d9d9d9;top:0;right:8px;bottom:0;left:8px;border:solid 1px;border-color:#a6a6a6 #b3b3b3 #bfbfbf;border-radius:5px}.ae-slider-rail:after{position:relative;top:-0.462em;float:right;content:'Max';margin:0 -3em 0 0;color:#999}.goog-slider-horizontal .goog-slider-thumb,.goog-twothumbslider-horizontal
.goog-twothumbslider-value-thumb,.goog-twothumbslider-horizontal .goog-twothumbslider-extent-thumb{position:absolute;width:17px;height:17px;background:transparent url(/img/slider_thumb-down.png) no-repeat;outline:0}.goog-slider-horizontal .goog-slider-thumb{top:-5px}.goog-twothumbslider-horizontal .goog-twothumbslider-value-thumb{top:-11px}.goog-twothumbslider-horizontal .goog-twothumbslider-extent-thumb{top:2px;background-image:url(/img/slider_thumb-up.png)}.ae-admin-performance-scale{position:relative;display:inline-block;width:502px;margin:0 0 2.7em 3em}.ae-admin-performance-scale .ae-admin-performance-scale-start{position:absolute;display:inline-block;top:0;width:100%;text-align:left}.ae-admin-performance-scale .ae-admin-performance-scale-mid{position:absolute;display:inline-block;top:0;width:100%;text-align:center}.ae-admin-performance-scale .ae-admin-performance-scale-end{position:absolute;display:inline-block;top:0;width:100%;text-align:right}.ae-absolute-container{display:inline-block;width:100%}.ae-hidden-range{display:none}.ae-default-version-radio-column{width:1em}#ae-billing-form-c{_margin-right:-3000px;_position:relative;_width:100%}.ae-rounded-top-small{-moz-border-radius-topleft:3px;-webkit-border-top-left-radius:3px;-moz-border-radius-topright:3px;-webkit-border-top-right-radius:3px}.ae-progress-content{height:400px}#ae-billing-tos{text-align:left;width:100%;margin-bottom:.5em}.ae-billing-budget-section{margin-bottom:1.5em}.ae-billing-budget-section .g-unit,.g-unit .ae-billing-budget-section .g-unit,.g-unit .g-unit .ae-billing-budget-section .g-unit{margin:0 0 0 11em;width:auto;float:none}.g-unit .g-unit .ae-billing-budget-section .g-first,.g-unit .ae-billing-budget-section .g-first,.ae-billing-budget-section .g-first{margin:0;width:11em;float:left}#ae-billing-form .ae-btn-row{margin-left:11em}#ae-billing-form .ae-btn-row .ae-info{margin-top:10px}#ae-billing-checkout{width:150px;float:left}#ae-billing-alloc-table{border:1px solid 
#c5d7ef;border-bottom:none;width:100%;margin-top:.5em}#ae-billing-alloc-table th,#ae-billing-alloc-table td{padding:.35em 1em .25em .35em;border-bottom:1px solid #c5d7ef;color:#000;white-space:nowrap}.ae-billing-resource{background-color:transparent;font-weight:normal}#ae-billing-alloc-table tr th span{font-weight:normal}#ae-billing-alloc-table tr{vertical-align:baseline}#ae-billing-alloc-table th{white-space:nowrap}#ae-billing-alloc-table .ae-editable span.ae-text-input-clone,#ae-billing-alloc-table .ae-readonly input{display:none}#ae-billing-alloc-table .ae-readonly span.ae-text-input-clone,#ae-billing-alloc-table .ae-editable input{display:inline}#ae-billing-alloc-table td span.ae-billing-warn-note,#ae-billing-table-errors .ae-billing-warn-note{margin:0;background-repeat:no-repeat;display:inline-block;background-image:url(/img/icn/warning.png);text-align:right;padding-left:16px;padding-right:.1em;height:16px;font-weight:bold}#ae-billing-alloc-table td span.ae-billing-warn-note span,#ae-billing-table-errors .ae-billing-warn-note span{vertical-align:super;font-size:80%}#ae-billing-alloc-table td span.ae-billing-error-hidden,#ae-billing-table-errors .ae-billing-error-hidden{display:none}.ae-billing-percent{font-size:80%;color:#666;margin-left:3px}#ae-billing-week-info{margin-top:5px;line-height:1.4}#ae-billing-table-errors{margin-top:.3em}#ae-billing-allocation-noscript{margin-top:1.5em}#ae-billing-allocation-custom-opts{margin-left:2.2em}#ae-billing-settings h2{font-size:1em;display:inline}#ae-billing-settings p{padding:.3em 0 .5em}#ae-billing-settings-table{margin:.4em 0 .5em}#ae-settings-resource-col{width:19%}#ae-settings-budget-col{width:11%}#ae-billing-settings-table .ae-settings-budget-col{padding-right:2em}.ae-table th.ae-settings-unit-cell,.ae-table td.ae-settings-unit-cell,.ae-table th.ae-total-unit-cell,.ae-table 
td.ae-total-unit-cell{padding-left:1.2em}#ae-settings-unit-col{width:18%}#ae-settings-paid-col{width:15%}#ae-settings-free-col{width:15%}#ae-settings-total-col{width:22%}.ae-billing-inline-link{margin-left:.5em}.ae-billing-settings-section{margin-bottom:2em}.ae-billing-settings-formbutton{margin-top:.5em}#ae-billing-budget-setup-checkout{margin-bottom:0}#ae-billing-vat-c .ae-field-hint{width:85%}#ae-billing-checkout-note{margin-top:.8em}.ae-drachma-preset{background-color:#f6f9ff;margin-left:11em}.ae-drachma-preset p{margin-top:.5em}.ae-table thead th.ae-currency-th{text-align:right}#ae-billing-logs-date{width:15%}#ae-billing-logs-event{width:69%}#ae-billing-logs-amount{text-align:right;width:8%}#ae-billing-logs-balance{text-align:right;width:8%}#ae-billing-history-expand .ae-action{margin-left:1em}.ae-table .ae-billing-usage-report{width:100%;*width:auto;margin:0 0 1em 0}.ae-table .ae-billing-usage-report th,.ae-billing-charges th{color:#666;border-top:0}.ae-table .ae-billing-usage-report th,.ae-table .ae-billing-usage-report td,.ae-billing-charges th,.ae-billing-charges td{background-color:transparent;padding:.4em 0;border-bottom:1px solid #ddd}.ae-table .ae-billing-usage-report tfoot td,.ae-billing-charges tfoot td{border-bottom:none}.ae-billing-report-resource{width:30%}.ae-billing-report-used{width:20%}.ae-billing-report-free{width:16%}.ae-billing-report-paid{width:17%}.ae-billing-report-charge{width:17%}.ae-billing-change-resource{width:85%}.ae-billing-change-budget{width:15%}#ae-billing-always-on-label{display:inline}#ae-billing-budget-buffer-label{display:inline}.ae-billing-charges{width:50%}.ae-billing-charges-charge{text-align:right}.ae-billing-usage-report-container{padding:1em 1em 0 
1em}#ae-billing-new-usage{background-color:#f6f9ff}.goog-zippy-expanded{background-image:url(/img/wgt/minus.gif);cursor:pointer;background-repeat:no-repeat;padding-left:17px}.goog-zippy-collapsed{background-image:url(/img/wgt/plus.gif);cursor:pointer;background-repeat:no-repeat;padding-left:17px}#ae-admin-logs-pagination{width:auto}.ae-usage-cycle-note{color:#555}#ae-createapp-start{background-color:#c6d5f1;padding:1em;padding-bottom:2em;text-align:center}#ae-admin-app_id_alias-check,#ae-createapp-id-check{margin:0 0 0 1em}#ae-admin-app_id_alias-message{display:block;margin:.4em 0}#ae-createapp-id-content{width:100%}#ae-createapp-id-content td{vertical-align:top}#ae-createapp-id-td{white-space:nowrap;width:1%}#ae-createapp-id-td #ae-createapp-id-error{position:absolute;width:24em;padding-left:1em;white-space:normal}#ae-createapp-id-error-td{padding-left:1em}#ae-admin-dev-invite label{float:left;width:3.6em;position:relative;top:.3em}#ae-admin-dev-invite .ae-radio{margin-left:3.6em}#ae-admin-dev-invite .ae-radio label{float:none;width:auto;font-weight:normal;position:static}#ae-admin-dev-invite .goog-button{margin-left:3.6em}#ae-admin-dev-invite .ae-field-hint{margin-left:4.2em}#ae-admin-dev-invite .ae-radio .ae-field-hint{margin-left:0}.ae-you{color:#008000}#ae-authdomain-opts{margin-bottom:1em}#ae-authdomain-content .ae-input-text,#ae-authdomain-content .ae-field-hint{margin:.3em 0 .4em 2.5em}#ae-authdomain-opts a{margin-left:1em}#ae-authdomain-opts-hint{margin-top:.2em;color:#666667;font-size:.85em}#ae-authdomain-content #ae-authdomain-desc .ae-field-hint{margin-left:0}#ae-storage-opts{margin-bottom:1em}#ae-storage-content .ae-input-text,#ae-storage-content .ae-field-hint{margin:.3em 0 .4em 2.5em}#ae-storage-opts a{margin-left:1em}#ae-storage-opts-hint{margin-top:.2em;color:#666667;font-size:.85em}#ae-storage-content #ae-storage-desc .ae-field-hint{margin-left:0}#ae-dash .g-section{margin:0 0 1em}#ae-dash * .g-section{margin:0}#ae-dash-quota 
.ae-alert{padding-left:1.5em}.ae-dash-email-disabled{background:url(/img/icn/exclamation_circle.png) no-repeat;margin-top:.5em;margin-bottom:.5em;min-height:16px;padding-left:1.5em}#ae-dash-email-disabled-footnote{padding-left:1.5em;margin:5px 0 0;font-weight:normal}#ae-dash-graph-c{border:1px solid #c5d7ef;padding:5px 0}#ae-dash-graph-change{margin:0 0 0 5px}#ae-dash-graph-img{padding:5px;margin-top:.5em;background-color:#fff;display:block}#ae-dash-graph-nodata{text-align:center}#ae-dash .ae-logs-severity{margin-right:.5em}#ae-dash .g-c{padding:0 0 0 .1em}#ae-dash .g-tpl-50-50 .g-unit .g-c{padding:0 0 0 1em}#ae-dash .g-tpl-50-50 .g-first .g-c{padding:0 1em 0 .1em}.ae-quota-warnings{background-color:#fffbe8;margin:0;padding:.5em .5em 0;text-align:left}.ae-quota-warnings div{padding:0 0 .5em}#ae-dash-quota-refresh-info{font-size:85%}#ae-dash #ae-dash-quota-bar-col,#ae-dash .ae-dash-quota-bar{width:100px}#ae-dash-quotadetails #ae-dash-quota-bar-col,#ae-dash-quotadetails .ae-dash-quota-bar{width:200px}#ae-dash-quota-percent-col{width:3.5em}#ae-dash-quota-cost-col{width:15%}#ae-dash-quota-alert-col{width:1%}#ae-dash .ae-dash-quota-alert-td{padding:0}.ae-dash-quota-alert-td a{display:block;width:16px;height:16px}#ae-dash .ae-dash-quota-alert-td .ae-alert{display:block;width:16px;height:16px;margin:0;padding:0}#ae-dash .ae-dash-quota-alert-td .ae-dash-email-disabled{display:block;width:16px;height:16px;margin:0;padding:0}#ae-dash-quota tbody th{font-weight:normal}#ae-dash-quota caption{padding:0}#ae-dash-quota caption .g-c{padding:3px}.ae-dash-quota-bar{float:left;background-color:#c0c0c0;height:13px;margin:.1em 0 0 0;position:relative}.ae-dash-quota-bar-free{background:url(/img/free_marker.png) top left no-repeat;width:7px;height:13px;position:absolute;top:0;left:0}#ae-dash-quota-footnote{margin:5px 0 
0;font-weight:normal}.ae-quota-warning{background-color:#f90}.ae-quota-alert{background-color:#c00}.ae-quota-normal{background-color:#0b0}.ae-quota-alert-text{color:#c00}.ae-favicon-text{font-size:.85em}#ae-dash-popular{width:97%}#ae-dash-popular-reqsec-col{width:6.5em}#ae-dash-popular-req-col{width:7em}#ae-dash-popular-cpu-avg-col{width:9.5em}#ae-dash-popular-cpu-percent-col{width:7em}#ae-dash-popular .ae-unimportant{font-size:80%}#ae-dash-popular .ae-nowrap,#ae-dash-errors .ae-nowrap{margin-right:5px;overflow:hidden}#ae-dash-popular th span,#ae-dash-errors th span{font-size:.8em;font-weight:normal;display:block}#ae-dash-errors caption .g-unit{width:9em}#ae-dash-errors-count-col{width:5em}#ae-dash-errors-percent-col{width:7em}#ae-dash-graph-chart-type{float:left;margin-right:1em}#ae-apps-all strong.ae-disabled{color:#000;background:#eee}.ae-quota-resource{width:30%}.ae-quota-safety-limit{width:10%}#ae-quota-details h3{padding-bottom:0;margin-bottom:.25em}#ae-quota-details table{margin-bottom:1.75em}#ae-quota-details table.ae-quota-requests{margin-bottom:.5em}#ae-quota-refresh-note p{text-align:right;padding-top:.5em;padding-bottom:0;margin-bottom:0}#ae-quota-first-api.g-section{padding-bottom:0;margin-bottom:.25em}#ae-instances-summary-table,#ae-instances-details-table{margin-bottom:1em}.ae-instances-details-availability-image{float:left;margin-right:.5em}.ae-instances-small-text{font-size:80%}.ae-instances-small-text .ae-separator{color:#666}.ae-appbar-superuser-message strong{color:red}#ae-backends-table tr{vertical-align:baseline}.ae-backends-class-reminder{font-size:80%;color:#666;margin-left:3px}#ae-datastore-explorer-c{_margin-right:-3000px;_position:relative;_width:100%}#ae-datastore-explorer form dt{margin:1em 0 0 0}#ae-datastore-explorer #ae-datastore-explorer-labels{margin:0 0 3px}#ae-datastore-explorer-header .ae-action{margin-left:1em}#ae-datastore-explorer .id{white-space:nowrap}#ae-datastore-explorer 
caption{text-align:right;padding:5px}#ae-datastore-explorer-submit{margin-top:5px}#ae-datastore-explorer-namespace{margin-top:7px;margin-right:5px}#ae-datastore-explorer-gql-spacer{margin-top:22px}h4 #ae-datastore-explorer-gql-label{font-weight:normal}#ae-datastore-form em{font-style:normal;font-weight:normal;margin:0 0 0 .2em;color:#666}#ae-datastore-form dt{font-weight:bold}#ae-datastore-form dd{margin:.4em 0 .3em 1.5em;overflow:auto;zoom:1}#ae-datastore-form dd em{width:4em;float:left}#ae-datastore-form dd.ae-last{margin-bottom:1em}#ae-datastore-explorer-tabs-content{margin-bottom:1em}#ae-datastore-explorer-list .ae-label-row,#ae-datastore-explorer-new .ae-label-row{float:left;padding-top:.2em}#ae-datastore-explorer-list .ae-input-row,#ae-datastore-explorer-list .ae-btn-row,#ae-datastore-explorer-new .ae-input-row,#ae-datastore-explorer-new .ae-btn-row{margin-left:6em}#ae-datastore-explorer-list .ae-btn-row,#ae-datastore-explorer-new .ae-btn-row{margin-bottom:0}.ae-datastore-index-name{font-size:1.2em;font-weight:bold}.ae-table .ae-datastore-index-defs{padding-left:20px}.ae-datastore-index-defs-row{border-top:1px solid #ddd}.ae-datastore-index-defs .ae-unimportant{font-size:.8em}.ae-datastore-index-status{border:1px solid #c0dfbf;background:#f3f7f3;margin:0 25px 0 0;padding:3px}#ae-datastore-index-status-col{width:15%}.ae-datastore-index-status-Building{border-color:#edebcd;background:#fefdec}.ae-datastore-index-status-Deleting{border-color:#ccc;background:#eee}.ae-datastore-index-status-Error{border-color:#ffd3b4;background:#ffeae0}.ae-datastore-pathlink{font-size:.9em}#ae-datastore-stats-top-level-c{padding-bottom:1em;margin-bottom:1em;border-bottom:1px solid 
#e5ecf9}#ae-datastore-stats-top-level{width:100%}#ae-datastore-stats-piecharts-c{margin-bottom:1em}.ae-datastore-stats-piechart-label{font-size:.85em;font-weight:normal;text-align:center;padding:0}#ae-datastore-stats-property-type{width:65%}#ae-datastore-stats-size-all{width:35%}#ae-datastore-stats-property-name{width:60%}#ae-datastore-stats-type{width:10%}#ae-datastore-stats-size-entity{width:30%}#ae-datastore-blob-filter-form{margin-bottom:1em}#ae-datastore-blob-query-filter-label{padding-right:.5em}#ae-datastore-blob-filter-contents{padding-top:.5em}#ae-datastore-blob-date-after,#ae-datastore-blob-date-before{float:left}#ae-datastore-blob-date-after{margin-right:1em}#ae-datastore-blob-order label{font-weight:normal}#ae-datastore-blob-col-check{width:2%}#ae-datastore-blob-col-file{width:45%}#ae-datastore-blob-col-type{width:14%}#ae-datastore-blob-col-size{width:16%}#ae-blobstore-col-date{width:18%}#ae-blob-detail-filename{padding-bottom:0}#ae-blob-detail-filename span{font-weight:normal}#ae-blob-detail-key{font-size:85%}#ae-blob-detail-preview{margin-top:1em}#ae-blob-detail-dl{text-align:right}#ae-domain-admins-list li{margin-bottom:.3em}#ae-domain-admins-list button{margin-left:.5em}#ae-new-app-dialog-c{width:500px}#ae-new-app-dialog-c .g-section{margin-bottom:1em}p.light-note{color:#555}.ae-bottom-message{margin-top:1em}#domsettings-form div.ae-radio{margin-left:1.7em}#domsettings-form div.ae-radio input{margin-left:-1.47em;float:left}#ae-logs-c{_margin-right:-2000px;_position:relative;_width:100%;background:#fff}#ae-logs{background-color:#c5d7ef;padding:1px;line-height:1.65}#ae-logs .ae-table-caption{border:0}#ae-logs-c ol,#ae-logs-c li{list-style:none;padding:0;margin:0}#ae-logs-c li li{margin:0 0 0 3px;padding:0 0 0 17px}.ae-log-noerror{padding-left:23px}#ae-logs-form .goog-inline-block{margin-top:0}.ae-logs-reqlog .snippet{margin:.1em}.ae-logs-applog 
.snippet{color:#666}.ae-logs-severity{display:block;float:left;height:1.2em;width:1.2em;line-height:1.2;text-align:center;text-transform:capitalize;font-weight:bold;border-radius:2px;-moz-border-radius:2px;-webkit-border-radius:2px}.ae-logs-severity-4{background-color:#f22;color:#000}.ae-logs-severity-3{background-color:#f90;color:#000}.ae-logs-severity-2{background-color:#fd0}.ae-logs-severity-1{background-color:#3c0;color:#000}.ae-logs-severity-0{background-color:#09f;color:#000}#ae-logs-legend{margin:1em 0 0 0}#ae-logs-legend ul{list-style:none;margin:0;padding:0}#ae-logs-legend li,#ae-logs-legend strong{float:left;margin:0 1em 0 0}#ae-logs-legend li span{margin-right:.3em}.ae-logs-timestamp{padding:0 5px;font-size:85%}#ae-logs-form-c{margin-bottom:5px;padding-bottom:.5em;padding-left:1em}#ae-logs-form{padding:.3em 0 0}#ae-logs-form .ae-label-row{float:left;padding-top:.2em;margin-right:0.539em}#ae-logs-form .ae-input-row,#ae-logs-form .ae-btn-row{margin-left:4em}#ae-logs-form .ae-btn-row{margin-bottom:0}#ae-logs-requests-c{margin-bottom:.1em}#ae-logs-requests-c input{margin:0}#ae-logs-requests-all-label{margin-right:0.539em}#ae-logs-form-options{margin-top:8px}#ae-logs-tip{margin:.2em 0}#ae-logs-expand{margin-right:.2em}#ae-logs-severity-level-label{margin-top:.3em;display:block}#ae-logs-filter-hint-labels-list{margin:2px 0}#ae-logs-filter-hint-labels-list span{position:absolute}#ae-logs-filter-hint-labels-list ul{margin-left:5.5em;padding:0}#ae-logs-filter-hint-labels-list li{float:left;margin-right:.4em;line-height:1.2}.ae-toggle .ae-logs-getdetails,.ae-toggle pre{display:none}.ae-log-expanded .ae-toggle pre{display:block}#ae-logs-c .ae-log .ae-toggle{cursor:default;background:none;padding-left:0}#ae-logs-c .ae-log .ae-toggle h5{cursor:pointer;background-position:0 .55em;background-repeat:no-repeat;padding-left:17px}.ae-log .ae-plus h5{background-image:url(/img/wgt/plus.gif)}.ae-log .ae-minus 
h5{background-image:url(/img/wgt/minus.gif)}.ae-log{overflow:hidden;background-color:#fff;padding:.3em 0;line-height:1.65;border-bottom:1px solid #c5d7ef}.ae-log .ae-even{background-color:#e9e9e9;border:0}.ae-log h5{font-weight:normal;white-space:nowrap;padding:.4em 0 0 0}.ae-log span,.ae-log strong{margin:0 .3em}.ae-log .ae-logs-snippet{color:#666}.ae-log pre,.ae-logs-expanded{padding:.3em 0 .5em 1.5em;margin:0;font-family:"Courier New"}.ae-log .file{font-weight:bold}.ae-log.ae-log-expanded .file{white-space:pre-wrap;word-wrap:break-word}.ae-logs-app .ae-logs-req{display:none}.ae-logs-req .ae-app,.ae-logs-both .ae-app{padding-left:1em}#ae-dos-blacklist-rejects-table{text-align:left}#ae-dash-quota-percent-col{width:3.5em}.ae-cron-status-ok{color:#008000;font-size:90%;font-weight:bold}.ae-cron-status-error{color:#a03;font-size:90%;font-weight:bold}#ae-cronjobs-table .ae-table td{vertical-align:top}#ae-tasks-table td{vertical-align:top}#ae-tasks-quota{margin:0 0 1em 0}#ae-tasks-quota .ae-dash-quota-bar{width:150px}#ae-tasks-quota #ae-dash-quota-bar-col,#ae-tasks-quota .ae-dash-quota-bar{width:200px}.ae-tasks-paused-row{color:#666;font-style:italic;font-weight:bold}#ae-tasks-quota .ae-quota-safety-limit{width:30%}#ae-tasks-table{margin-top:1em}#ae-tasks-queuecontrols{margin-top:1em;margin-bottom:1em}#ae-tasks-delete-col{width:1em}#ae-tasks-eta-col,#ae-tasks-creation-col{width:11em}#ae-tasks-actions-col{width:7em}#ae-tasks-retry-col{width:4em}#ae-tasks-body-col{width:6em}#ae-tasks-headers-col{width:7em}.ae-tasks-hex-column,.ae-tasks-ascii-column{width:16em}#ae-tasks-table .ae-tasks-arrow{text-align:center}
\ No newline at end of file
diff --git a/google/appengine/ext/datastore_admin/static/js/compiled.js b/google/appengine/ext/datastore_admin/static/js/compiled.js
index 244320a..d72410f 100755
--- a/google/appengine/ext/datastore_admin/static/js/compiled.js
+++ b/google/appengine/ext/datastore_admin/static/js/compiled.js
@@ -7,7 +7,7 @@
a:{var Sa="",P;if(Ma&&F.opera)var Ta=F.opera.version,Sa=typeof Ta=="function"?Ta():Ta;else if(Na?P=/rv\:([^\);]+)(\)|;)/:O?P=/MSIE\s+([^\);]+)(\)|;)/:Oa&&(P=/WebKit\/(\S+)/),P)var Ua=P.exec(Ja()),Sa=Ua?Ua[1]:"";if(O){var Va,Wa=F.document;Va=Wa?Wa.documentMode:h;if(Va>parseFloat(Sa)){Ra=o(Va);break a}}Ra=Sa}var Xa=Ra,Ya={},Za=function(a){return Ya[a]||(Ya[a]=ua(Xa,a)>=0)},$a={},ab=function(a){return $a[a]||($a[a]=O&&p.documentMode&&p.documentMode>=a)};var bb=!O||ab(9);!Na&&!O||O&&ab(9)||Na&&Za("1.9.1");O&&Za("9");var cb=function(a,b){var c;c=(c=a.className)&&typeof c[x]=="function"?c[x](/\s+/):[];var e=Ba(arguments,1),g;g=c;for(var d=0,f=0;f<e[s];f++)xa(g,e[f])>=0||(g[r](e[f]),d++);g=d==e[s];a.className=c.join(" ");return g};var db=function(a,b,c,e){var a=e||a,g=b&&b!="*"?b.toUpperCase():"";if(a.querySelectorAll&&a.querySelector&&(!Oa||p.compatMode=="CSS1Compat"||Za("528"))&&(g||c))return a.querySelectorAll(g+(c?"."+c:""));if(c&&a.getElementsByClassName)if(b=a.getElementsByClassName(c),g){for(var a={},d=e=0,f;f=b[d];d++)g==f.nodeName&&(a[e++]=f);aa(a,e);return a}else return b;b=a.getElementsByTagName(g||"*");if(c){a={};for(d=e=0;f=b[d];d++){var g=f.className,j;if(j=typeof g[x]=="function")g=g[x](/\s+/),j=xa(g,c)>=0;j&&
(a[e++]=f)}aa(a,e);return a}else return b},fb=function(a,b){Ca(b,function(b,e){e=="style"?a.style.cssText=b:e=="class"?a.className=b:e=="for"?a.htmlFor=b:e in eb?a.setAttribute(eb[e],b):e.lastIndexOf("aria-",0)==0?a.setAttribute(e,b):a[e]=b})},eb={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",rowspan:"rowSpan",valign:"vAlign",height:"height",width:"width",usemap:"useMap",frameborder:"frameBorder",maxlength:"maxLength",type:"type"},hb=function(a,b,c,e){function g(c){c&&b.appendChild(I(c)?
a.createTextNode(c):c)}for(;e<c[s];e++){var d=c[e];ha(d)&&!(ja(d)&&d.nodeType>0)?ya(gb(d)?Aa(d):d,g):g(d)}},ib=function(a,b,c){var e=p,g=arguments,d=g[0],f=g[1];if(!bb&&f&&(f[ga]||f[C])){d=["<",d];f[ga]&&d[r](' name="',sa(f[ga]),'"');if(f[C]){d[r](' type="',sa(f[C]),'"');var j={};Ea(j,f);f=j;delete f[C]}d[r](">");d=d.join("")}d=e.createElement(d);if(f)I(f)?d.className=f:H(f)=="array"?cb[D](i,[d].concat(f)):fb(d,f);g[s]>2&&hb(e,d,g,2);return d},gb=function(a){if(a&&typeof a[s]=="number")if(ja(a))return typeof a.item==
-"function"||typeof a.item=="string";else if(ia(a))return typeof a.item=="function";return!1};var jb=new Function("a","return a");var kb;!O||ab(9);O&&Za("8");var Q=function(){};Q[t].H=!1;Q[t].f=function(){if(!this.H)this.H=!0,this.h()};Q[t].h=function(){this.P&&lb[D](i,this.P)};var lb=function(a){for(var b=0,c=arguments[s];b<c;++b){var e=arguments[b];ha(e)?lb[D](i,e):e&&typeof e.f=="function"&&e.f()}};var R=function(a,b){this.type=a;ca(this,b);q(this,this[z])};K(R,Q);R[t].h=function(){delete this[C];delete this[z];delete this.currentTarget};R[t].r=!1;R[t].O=!0;var S=function(a,b){a&&this.o(a,b)};K(S,R);E=S[t];ca(E,i);E.relatedTarget=i;E.offsetX=0;E.offsetY=0;E.clientX=0;E.clientY=0;E.screenX=0;E.screenY=0;E.button=0;E.keyCode=0;E.charCode=0;E.ctrlKey=!1;E.altKey=!1;E.shiftKey=!1;E.metaKey=!1;E.N=!1;E.C=i;
+"function"||typeof a.item=="string";else if(ia(a))return typeof a.item=="function";return!1};var jb=function(a){jb[" "](a);return a};jb[" "]=G;var kb;!O||ab(9);O&&Za("8");var Q=function(){};Q[t].H=!1;Q[t].f=function(){if(!this.H)this.H=!0,this.h()};Q[t].h=function(){this.P&&lb[D](i,this.P)};var lb=function(a){for(var b=0,c=arguments[s];b<c;++b){var e=arguments[b];ha(e)?lb[D](i,e):e&&typeof e.f=="function"&&e.f()}};var R=function(a,b){this.type=a;ca(this,b);q(this,this[z])};K(R,Q);R[t].h=function(){delete this[C];delete this[z];delete this.currentTarget};R[t].r=!1;R[t].O=!0;var S=function(a,b){a&&this.o(a,b)};K(S,R);E=S[t];ca(E,i);E.relatedTarget=i;E.offsetX=0;E.offsetY=0;E.clientX=0;E.clientY=0;E.screenX=0;E.screenY=0;E.button=0;E.keyCode=0;E.charCode=0;E.ctrlKey=!1;E.altKey=!1;E.shiftKey=!1;E.metaKey=!1;E.N=!1;E.C=i;
E.o=function(a,b){var c=this.type=a[C];R[A](this,c);ca(this,a[z]||a.srcElement);q(this,b);var e=a.relatedTarget;if(e){if(Na){var g;a:{try{jb(e.nodeName);g=!0;break a}catch(d){}g=!1}g||(e=i)}}else if(c=="mouseover")e=a.fromElement;else if(c=="mouseout")e=a.toElement;this.relatedTarget=e;this.offsetX=a.offsetX!==h?a.offsetX:a.layerX;this.offsetY=a.offsetY!==h?a.offsetY:a.layerY;this.clientX=a.clientX!==h?a.clientX:a.pageX;this.clientY=a.clientY!==h?a.clientY:a.pageY;this.screenX=a.screenX||0;this.screenY=
a.screenY||0;this.button=a.button;this.keyCode=a[ea]||0;this.charCode=a.charCode||(c=="keypress"?a[ea]:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.N=Ia?a.metaKey:a.ctrlKey;this.state=a.state;this.C=a;delete this.O;delete this.r};E.h=function(){S.B.h[A](this);this.C=i;ca(this,i);q(this,i);this.relatedTarget=i};var T=function(a,b){this.D=b;this.b=[];this.M(a)};K(T,Q);E=T[t];E.s=i;E.G=i;E.k=function(a){this.s=a};E.i=function(){return this.b[s]?this.b.pop():this.F()};E.j=function(a){this.b[s]<this.D?this.b[r](a):this.A(a)};E.M=function(a){if(a>this.D)throw k("[goog.structs.SimplePool] Initial cannot be greater than max");for(var b=0;b<a;b++)this.b[r](this.F())};E.F=function(){return this.s?this.s():{}};E.A=function(a){if(this.G)this.G(a);else if(ja(a))if(ia(a.f))a.f();else for(var b in a)delete a[b]};
E.h=function(){T.B.h[A](this);for(var a=this.b;a[s];)this.A(a.pop());delete this.b};var mb,nb=(mb="ScriptEngine"in F&&F.ScriptEngine()=="JScript")?F.ScriptEngineMajorVersion()+"."+F.ScriptEngineMinorVersion()+"."+F.ScriptEngineBuildVersion():"0";var ob=function(){},pb=0;E=ob[t];E.c=0;E.e=!1;E.v=!1;E.o=function(a,b,c,e,g,d){if(ia(a))this.z=!0;else if(a&&a[fa]&&ia(a[fa]))this.z=!1;else throw k("Invalid listener argument");this.n=a;this.u=b;this.src=c;this.type=e;this.J=!!g;this.t=d;this.v=!1;this.c=++pb;this.e=!1};E.handleEvent=function(a){return this.z?this.n[A](this.t||this.src,a):this.n[fa][A](this.n,a)};var qb,rb,U,sb,tb,ub,vb,wb,xb,yb,zb;
diff --git a/google/appengine/ext/datastore_admin/utils.py b/google/appengine/ext/datastore_admin/utils.py
index 474b3ab..7195f82 100755
--- a/google/appengine/ext/datastore_admin/utils.py
+++ b/google/appengine/ext/datastore_admin/utils.py
@@ -41,6 +41,7 @@
from google.appengine.ext.mapreduce import control
from google.appengine.ext.mapreduce import input_readers
from google.appengine.ext.mapreduce import model
+from google.appengine.ext.webapp import _template
MEMCACHE_NAMESPACE = '_ah-datastore_admin'
XSRF_VALIDITY_TIME = 600
@@ -70,7 +71,6 @@
config.BASE_PATH
-from google.appengine.ext.webapp import template
def RenderToResponse(handler, template_file, template_params):
@@ -82,7 +82,7 @@
template_params: the parameters used to render the given template
"""
template_params = _GetDefaultParams(template_params)
- rendered = template.render(_GetTemplatePath(template_file), template_params)
+ rendered = _template.render(_GetTemplatePath(template_file), template_params)
handler.response.out.write(rendered)
diff --git a/google/appengine/ext/db/__init__.py b/google/appengine/ext/db/__init__.py
index 7b4fc94..4806baf 100755
--- a/google/appengine/ext/db/__init__.py
+++ b/google/appengine/ext/db/__init__.py
@@ -504,6 +504,11 @@
_kind_map[cls.kind()] = cls
+
+
+AUTO_UPDATE_UNCHANGED = object()
+
+
class Property(object):
"""A Property is an attribute of a Model.
@@ -656,6 +661,10 @@
entity. Most critically, it will fetch the datastore key value for
reference properties.
+    Some properties (e.g. DateTimeProperty, UserProperty) optionally update their
+ value on every put(). This call must return the current value for such
+ properties (get_updated_value_for_datastore returns the new value).
+
Args:
model_instance: Instance to fetch datastore value from.
@@ -665,6 +674,23 @@
"""
return self.__get__(model_instance, model_instance.__class__)
+ def get_updated_value_for_datastore(self, model_instance):
+ """Determine new value for auto-updated property.
+
+    Some properties (e.g. DateTimeProperty, UserProperty) optionally update their
+ value on every put(). This call must return the new desired value for such
+ properties. For all other properties, this call must return
+ AUTO_UPDATE_UNCHANGED.
+
+ Args:
+ model_instance: Instance to get new value for.
+
+ Returns:
+ Datastore representation of the new model value in a form that is
+ appropriate for storing in the datastore, or AUTO_UPDATE_UNCHANGED.
+ """
+ return AUTO_UPDATE_UNCHANGED
+
def make_value_from_datastore(self, value):
"""Native representation of this property.
@@ -926,6 +952,13 @@
if prop.name in kwds and not _from_entity:
raise
+
+ if isinstance(_from_entity, datastore.Entity) and _from_entity.is_saved():
+ self._entity = _from_entity
+ del self._key_name
+ del self._key
+
+
def key(self):
"""Unique key for this entity.
@@ -952,6 +985,15 @@
else:
raise NotSavedError()
+ def __set_property(self, entity, name, datastore_value):
+ if datastore_value == []:
+
+
+
+ entity.pop(name, None)
+ else:
+ entity[name] = datastore_value
+
def _to_entity(self, entity):
"""Copies information from this model to provided entity.
@@ -960,18 +1002,7 @@
"""
for prop in self.properties().values():
- datastore_value = prop.get_value_for_datastore(self)
- if datastore_value == []:
-
-
-
- try:
- del entity[prop.name]
- except KeyError:
-
- pass
- else:
- entity[prop.name] = datastore_value
+ self.__set_property(entity, prop.name, prop.get_value_for_datastore(self))
set_unindexed_properties = getattr(entity, 'set_unindexed_properties', None)
@@ -987,6 +1018,13 @@
Populated self._entity
"""
self._entity = self._populate_entity(_entity_class=_entity_class)
+
+
+ for prop in self.properties().values():
+ new_value = prop.get_updated_value_for_datastore(self)
+ if new_value is not AUTO_UPDATE_UNCHANGED:
+ self.__set_property(self._entity, prop.name, new_value)
+
for attr in ('_key_name', '_key'):
try:
delattr(self, attr)
@@ -1378,12 +1416,7 @@
entity_values = cls._load_entity_values(entity)
if entity.key().has_id_or_name():
entity_values['key'] = entity.key()
- instance = cls(None, _from_entity=True, **entity_values)
- if entity.is_saved():
- instance._entity = entity
- del instance._key_name
- del instance._key
- return instance
+ return cls(None, _from_entity=entity, **entity_values)
@classmethod
def kind(cls):
@@ -2880,18 +2913,16 @@
return self.now()
return Property.default_value(self)
- def get_value_for_datastore(self, model_instance):
- """Get value from property to send to datastore.
+ def get_updated_value_for_datastore(self, model_instance):
+ """Get new value for property to send to datastore.
Returns:
now() as appropriate to the date-time instance in the odd case where
- auto_now is set to True, else the default implementation.
+ auto_now is set to True, else AUTO_UPDATE_UNCHANGED.
"""
if self.auto_now:
return self.now()
- else:
- return super(DateTimeProperty,
- self).get_value_for_datastore(model_instance)
+ return AUTO_UPDATE_UNCHANGED
data_type = datetime.datetime
@@ -2969,6 +3000,17 @@
(self.name, self.data_type.__name__))
return value
+ def get_updated_value_for_datastore(self, model_instance):
+ """Get new value for property to send to datastore.
+
+ Returns:
+ now() as appropriate to the date instance in the odd case where
+ auto_now is set to True, else AUTO_UPDATE_UNCHANGED.
+ """
+ if self.auto_now:
+ return _date_to_datetime(self.now())
+ return AUTO_UPDATE_UNCHANGED
+
def get_value_for_datastore(self, model_instance):
"""Get value from property to send to datastore.
@@ -3028,6 +3070,17 @@
"""
return value is None
+ def get_updated_value_for_datastore(self, model_instance):
+ """Get new value for property to send to datastore.
+
+ Returns:
+ now() as appropriate to the time instance in the odd case where
+ auto_now is set to True, else AUTO_UPDATE_UNCHANGED.
+ """
+ if self.auto_now:
+ return _time_to_datetime(self.now())
+ return AUTO_UPDATE_UNCHANGED
+
def get_value_for_datastore(self, model_instance):
"""Get value from property to send to datastore.
@@ -3229,16 +3282,16 @@
return users.get_current_user()
return None
- def get_value_for_datastore(self, model_instance):
- """Get value from property to send to datastore.
+ def get_updated_value_for_datastore(self, model_instance):
+ """Get new value for property to send to datastore.
Returns:
Value of users.get_current_user() if auto_current_user is set;
- else the default implementation.
+ else AUTO_UPDATE_UNCHANGED.
"""
if self.auto_current_user:
return users.get_current_user()
- return super(UserProperty, self).get_value_for_datastore(model_instance)
+ return AUTO_UPDATE_UNCHANGED
data_type = users.User
diff --git a/google/appengine/ext/ereporter/report_generator.py b/google/appengine/ext/ereporter/report_generator.py
index 5d43aa5..25aecae 100755
--- a/google/appengine/ext/ereporter/report_generator.py
+++ b/google/appengine/ext/ereporter/report_generator.py
@@ -54,7 +54,7 @@
from google.appengine.ext import db
from google.appengine.ext import ereporter
from google.appengine.ext import webapp
-from google.appengine.ext.webapp import template
+from google.appengine.ext.webapp import _template
from google.appengine.ext.webapp.util import run_wsgi_app
@@ -125,7 +125,7 @@
'versions': versions,
}
path = os.path.join(os.path.dirname(__file__), 'templates', 'report.html')
- return template.render(path, template_values)
+ return _template.render(path, template_values)
def SendReport(self, report):
"""Emails an exception report.
diff --git a/google/appengine/ext/go/__init__.py b/google/appengine/ext/go/__init__.py
index ee93431..b14289f 100644
--- a/google/appengine/ext/go/__init__.py
+++ b/google/appengine/ext/go/__init__.py
@@ -242,7 +242,11 @@
ename = os.path.join(dname, entry)
if APP_CONFIG.skip_files.match(ename):
continue
- s = os.stat(ename)
+ try:
+ s = os.stat(ename)
+ except OSError, e:
+ logging.warn('%s', e)
+ continue
if stat.S_ISDIR(s[stat.ST_MODE]):
dirs.append(ename)
continue
diff --git a/google/appengine/ext/gql/__init__.py b/google/appengine/ext/gql/__init__.py
index ffb0833..2843555 100755
--- a/google/appengine/ext/gql/__init__.py
+++ b/google/appengine/ext/gql/__init__.py
@@ -56,6 +56,9 @@
_EPOCH = datetime.datetime.utcfromtimestamp(0)
+
+_EMPTY_LIST_PROPERTY_NAME = '__empty_IN_list__'
+
def Execute(query_string, *args, **keyword_args):
"""Execute command to parse and run the query.
@@ -636,7 +639,7 @@
assert False, 'Unknown reference %s' % reference
def __AddMultiQuery(self, identifier, condition, value, enumerated_queries):
- """Helper function to add a muti-query to previously enumerated queries.
+ """Helper function to add a multi-query to previously enumerated queries.
Args:
identifier: property being filtered by this condition
@@ -695,6 +698,14 @@
raise datastore_errors.BadArgumentError(
'Cannot satisfy query -- too many IN/!= values.')
+ if in_list_size == 0:
+
+ num_iterations = CloneQueries(enumerated_queries, 1)
+ for clone_num in xrange(num_iterations):
+
+ enumerated_queries[clone_num][_EMPTY_LIST_PROPERTY_NAME] = True
+ return
+
num_iterations = CloneQueries(enumerated_queries, in_list_size)
for clone_num in xrange(num_iterations):
for value_num in xrange(len(value)):
diff --git a/google/appengine/ext/key_range/__init__.py b/google/appengine/ext/key_range/__init__.py
index 925e2fb..ef37716 100755
--- a/google/appengine/ext/key_range/__init__.py
+++ b/google/appengine/ext/key_range/__init__.py
@@ -28,9 +28,12 @@
try:
- import simplejson
+ import json as simplejson
except ImportError:
- simplejson = None
+ try:
+ import simplejson
+ except ImportError:
+ simplejson = None
from google.appengine.api import datastore
from google.appengine.api import namespace_manager
@@ -47,7 +50,7 @@
class SimplejsonUnavailableError(Error):
- """Error while using json functionality whith unavailable simplejson."""
+ """Error using json functionality with unavailable json and simplejson."""
class KeyRange(object):
@@ -635,7 +638,7 @@
"""
if simplejson is None:
raise SimplejsonUnavailableError(
- "JSON functionality requires simplejson to be available")
+ "JSON functionality requires json or simplejson to be available")
def key_to_str(key):
if key:
@@ -669,7 +672,7 @@
"""
if simplejson is None:
raise SimplejsonUnavailableError(
- "JSON functionality requires simplejson to be available")
+ "JSON functionality requires json or simplejson to be available")
def key_from_str(key_str):
if key_str:
diff --git a/google/appengine/ext/mapreduce/base_handler.py b/google/appengine/ext/mapreduce/base_handler.py
index 849c363..d32f661 100755
--- a/google/appengine/ext/mapreduce/base_handler.py
+++ b/google/appengine/ext/mapreduce/base_handler.py
@@ -36,7 +36,10 @@
import logging
-import simplejson
+try:
+ import json as simplejson
+except ImportError:
+ import simplejson
import google
try:
@@ -102,9 +105,9 @@
name of the error_class and the error_message.
"""
- def __init__(self):
+ def __init__(self, *args):
"""Initializer."""
- super(BaseHandler, self).__init__()
+ super(BaseHandler, self).__init__(*args)
self.json_response = {}
def base_path(self):
diff --git a/google/appengine/ext/mapreduce/handlers.py b/google/appengine/ext/mapreduce/handlers.py
index a496179..b6bf008 100755
--- a/google/appengine/ext/mapreduce/handlers.py
+++ b/google/appengine/ext/mapreduce/handlers.py
@@ -42,7 +42,6 @@
import logging
import math
import os
-import simplejson
import time
from google.appengine.api import memcache
@@ -112,14 +111,10 @@
slice_id: id of the slice.
"""
- def __init__(self, time_function=time.time):
- """Constructor.
-
- Args:
- time_function: time function to use to obtain current time.
- """
- util.HugeTaskHandler.__init__(self)
- self._time = time_function
+ def __init__(self, *args):
+ """Constructor."""
+ util.HugeTaskHandler.__init__(self, *args)
+ self._time = time.time
def handle(self):
"""Handle request."""
@@ -350,14 +345,10 @@
mapreduce is still active.
"""
- def __init__(self, time_function=time.time):
- """Constructor.
-
- Args:
- time_function: time function to use to obtain current time.
- """
- util.HugeTaskHandler.__init__(self)
- self._time = time_function
+ def __init__(self, *args):
+ """Constructor."""
+ util.HugeTaskHandler.__init__(self, *args)
+ self._time = time.time
def handle(self):
"""Handle request."""
diff --git a/google/appengine/ext/mapreduce/input_readers.py b/google/appengine/ext/mapreduce/input_readers.py
index 558888d..45f8ade 100755
--- a/google/appengine/ext/mapreduce/input_readers.py
+++ b/google/appengine/ext/mapreduce/input_readers.py
@@ -1235,6 +1235,7 @@
UNAPPLIED_LOG_FILTER = "__unapplied_log_timestamp_us__ <"
DUMMY_KIND = "DUMMY_KIND"
DUMMY_ID = 106275677020293L
+ UNAPPLIED_QUERY_DEADLINE = 270
def _get_unapplied_jobs_accross_namespaces(self,
namespace_start,
@@ -1248,7 +1249,10 @@
_app=app),
self.UNAPPLIED_LOG_FILTER: self.start_time_us}
unapplied_query = datastore.Query(filters=filters, keys_only=True, _app=app)
- return unapplied_query.Get(limit=self._batch_size)
+ return unapplied_query.Get(
+ limit=self._batch_size,
+ config=datastore_rpc.Configuration(
+ deadline=self.UNAPPLIED_QUERY_DEADLINE))
def _iter_ns_range(self):
while True:
@@ -1278,7 +1282,11 @@
kind=None, keys_only=True)
unapplied_query[
ConsistentKeyReader.UNAPPLIED_LOG_FILTER] = self.start_time_us
- unapplied_jobs = unapplied_query.Get(limit=self._batch_size)
+
+ unapplied_jobs = unapplied_query.Get(
+ limit=self._batch_size,
+ config=datastore_rpc.Configuration(
+ deadline=self.UNAPPLIED_QUERY_DEADLINE))
if not unapplied_jobs:
break
self._apply_jobs(unapplied_jobs)
@@ -1298,7 +1306,7 @@
keys_to_apply.append(
db.Key.from_path(_app=key.app(), namespace=key.namespace(), *path))
db.get(keys_to_apply, config=datastore_rpc.Configuration(
- deadline=10,
+ deadline=self.UNAPPLIED_QUERY_DEADLINE,
read_policy=datastore_rpc.Configuration.APPLY_ALL_JOBS_CONSISTENCY))
@classmethod
diff --git a/google/appengine/ext/mapreduce/model.py b/google/appengine/ext/mapreduce/model.py
index 011bf73..524eb13 100644
--- a/google/appengine/ext/mapreduce/model.py
+++ b/google/appengine/ext/mapreduce/model.py
@@ -50,7 +50,11 @@
import logging
import math
import os
-import simplejson
+import random
+try:
+ import json as simplejson
+except ImportError:
+ import simplejson
import time
import types
@@ -60,7 +64,7 @@
from google.appengine.ext.mapreduce import context
from google.appengine.ext.mapreduce import hooks
from google.appengine.ext.mapreduce import util
-from graphy.backends import google_chart_api
+from google.appengine._internal.graphy.backends import google_chart_api
@@ -228,8 +232,10 @@
A string with a time descending key.
"""
now_descending = int((_FUTURE_TIME - gettime()) * 100)
- return "%d%s" % (now_descending,
- os.environ.get("REQUEST_ID_HASH", "FFFFFFFF"))
+ request_id_hash = os.environ.get("REQUEST_ID_HASH")
+ if not request_id_hash:
+ request_id_hash = str(random.getrandbits(32))
+ return "%d%s" % (now_descending, request_id_hash)
class CountersMap(JsonMixin):
diff --git a/google/appengine/ext/mapreduce/status.py b/google/appengine/ext/mapreduce/status.py
index 21ecc94..dd2b042 100755
--- a/google/appengine/ext/mapreduce/status.py
+++ b/google/appengine/ext/mapreduce/status.py
@@ -45,7 +45,6 @@
from google.appengine.ext.mapreduce import base_handler
from google.appengine.ext.mapreduce import errors
from google.appengine.ext.mapreduce import model
-from google.appengine.ext.webapp import template
diff --git a/google/appengine/ext/mapreduce/test_support.py b/google/appengine/ext/mapreduce/test_support.py
new file mode 100644
index 0000000..22739bb
--- /dev/null
+++ b/google/appengine/ext/mapreduce/test_support.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+"""Utilities to aid in testing mapreduces."""
+
+
+import base64
+import cgi
+import os
+import re
+
+from google.appengine.ext.mapreduce import main
+from google.appengine.ext.webapp import mock_webapp
+from google.appengine.ext.mapreduce import util
+
+
+def decode_task_payload(task):
+ """Decodes POST task payload.
+
+ Args:
+ task: a task to decode its payload.
+
+ Returns:
+ parameter_name -> parameter_value dict. If multiple parameter values are
+ present, then parameter_value will be a list.
+ """
+ body = task["body"]
+ if not body:
+ return {}
+ decoded = base64.b64decode(body)
+ result = {}
+ for (name, value) in cgi.parse_qs(decoded).items():
+ if len(value) == 1:
+ result[name] = value[0]
+ else:
+ result[name] = value
+ return util.HugeTask.decode_payload(result)
+
+
+def execute_task(task, handlers_map=None):
+ """Execute mapper's executor task.
+
+ This will try to determine the correct mapper handler for the task, will set
+ up all mock environment necessary for task execution, and execute the task
+ itself.
+
+ This function can be used for functional-style testing of functionality
+ depending on mapper framework.
+ """
+ if not handlers_map:
+ handlers_map = main.create_handlers_map()
+
+ url = task["url"]
+ handler = None
+
+ for (re_str, handler_class) in handlers_map:
+ if re.match(re_str, url):
+ handler = handler_class()
+ break
+
+ if not handler:
+ raise Exception("Can't determine handler for %s" % task)
+
+ handler.initialize(mock_webapp.MockRequest(),
+ mock_webapp.MockResponse())
+ handler.request.set_url(url)
+
+ for k, v in task["headers"]:
+ handler.request.headers[k] = v
+ environ_key = "HTTP_" + k.replace("-", "_").upper()
+ handler.request.environ[environ_key] = v
+ handler.request.environ["HTTP_X_APPENGINE_TASKNAME"] = task["name"]
+ handler.request.environ["HTTP_X_APPENGINE_QUEUENAME"] = task["queue_name"]
+ handler.request.environ["PATH_INFO"] = handler.request.path
+
+ saved_os_environ = os.environ
+ try:
+ os.environ = dict(os.environ)
+ os.environ.update(handler.request.environ)
+ if task["method"] == "POST":
+ for k, v in decode_task_payload(task).items():
+ handler.request.set(k, v)
+ handler.post()
+ elif task["method"] == "GET":
+ handler.get()
+ else:
+      raise Exception("Unsupported method: %s" % task["method"])
+ finally:
+ os.environ = saved_os_environ
+
+ if handler.response.status != 200:
+ raise Exception("Handler failure: %s (%s). \nTask: %s\nHandler: %s" %
+ (handler.response.status,
+ handler.response.status_message,
+ task,
+ handler))
+
+
+def execute_all_tasks(taskqueue, queue="default", handlers_map=None):
+ """Run and remove all tasks in the taskqueue.
+
+ Args:
+ taskqueue: An instance of taskqueue stub.
+ queue: Queue name to run all tasks from.
+ """
+ tasks = taskqueue.GetTasks(queue)
+ taskqueue.FlushQueue(queue)
+ for task in tasks:
+    execute_task(task, handlers_map=handlers_map)
+
+
+def execute_until_empty(taskqueue, queue="default", handlers_map=None):
+ """Execute taskqueue tasks until it becomes empty.
+
+ Args:
+ taskqueue: An instance of taskqueue stub.
+ queue: Queue name to run all tasks from.
+ """
+ while taskqueue.GetTasks(queue):
+ execute_all_tasks(taskqueue, queue, handlers_map)
+
diff --git a/google/appengine/ext/remote_api/handler.py b/google/appengine/ext/remote_api/handler.py
index 835fe41..9f388c1 100755
--- a/google/appengine/ext/remote_api/handler.py
+++ b/google/appengine/ext/remote_api/handler.py
@@ -350,9 +350,10 @@
</body>
</html>"""
+application = webapp.WSGIApplication([('.*', ApiCallHandler)])
+
def main():
- application = webapp.WSGIApplication([('.*', ApiCallHandler)])
wsgiref.handlers.CGIHandler().run(application)
diff --git a/google/appengine/ext/remote_api/remote_api_pb.py b/google/appengine/ext/remote_api/remote_api_pb.py
index 62a7509..c621675 100644
--- a/google/appengine/ext/remote_api/remote_api_pb.py
+++ b/google/appengine/ext/remote_api/remote_api_pb.py
@@ -205,6 +205,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ext.remote_api.Request'
class ApplicationError(ProtocolBuffer.ProtocolMessage):
has_code_ = 0
code_ = 0
@@ -343,6 +344,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ext.remote_api.ApplicationError'
class Response(ProtocolBuffer.ProtocolMessage):
has_response_ = 0
response_ = ""
@@ -555,6 +557,7 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ext.remote_api.Response'
class TransactionRequest_Precondition(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
has_hash_ = 0
@@ -886,5 +889,6 @@
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+ _PROTO_DESCRIPTOR_NAME = 'apphosting.ext.remote_api.TransactionRequest'
__all__ = ['Request','ApplicationError','Response','TransactionRequest','TransactionRequest_Precondition']
diff --git a/google/appengine/ext/remote_api/remote_api_services.py b/google/appengine/ext/remote_api/remote_api_services.py
index 2f1436b..a7f9190 100755
--- a/google/appengine/ext/remote_api/remote_api_services.py
+++ b/google/appengine/ext/remote_api/remote_api_services.py
@@ -131,7 +131,7 @@
'RunQuery': (datastore_pb.Query, datastore_pb.QueryResult),
'Transaction': (remote_api_pb.TransactionRequest,
datastore_pb.PutResponse),
- 'GetIDs': (remote_api_pb.PutRequest, datastore_pb.PutResponse),
+ 'GetIDs': (datastore_pb.PutRequest, datastore_pb.PutResponse),
},
'taskqueue': {
'Add': (taskqueue_service_pb.TaskQueueAddRequest,
diff --git a/google/appengine/ext/remote_api/remote_api_stub.py b/google/appengine/ext/remote_api/remote_api_stub.py
index e283281..5e7d740 100755
--- a/google/appengine/ext/remote_api/remote_api_stub.py
+++ b/google/appengine/ext/remote_api/remote_api_stub.py
@@ -75,7 +75,6 @@
import yaml
import hashlib
-from google.appengine.api import datastore
from google.appengine.api import apiproxy_rpc
from google.appengine.api import apiproxy_stub_map
from google.appengine.datastore import datastore_pb
@@ -254,6 +253,11 @@
assert response.IsInitialized(explanation), explanation
def _Dynamic_RunQuery(self, query, query_result, cursor_id = None):
+ if query.has_transaction():
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Remote API does not support queries inside transactions')
+
super(RemoteDatastoreStub, self).MakeSyncCall(
'datastore_v3', 'RunQuery', query, query_result)
diff --git a/google/appengine/ext/webapp/__init__.py b/google/appengine/ext/webapp/__init__.py
index c1ac324..a87ec76 100755
--- a/google/appengine/ext/webapp/__init__.py
+++ b/google/appengine/ext/webapp/__init__.py
@@ -185,9 +185,7 @@
_config_handle = lib_config.register(
'webapp',
- {'django_setup': _django_setup,
- 'add_wsgi_middleware': lambda app: app,
- })
+ {'add_wsgi_middleware': lambda app: app,})
from webapp2 import *
else:
_config_handle = lib_config.register(
@@ -196,5 +194,5 @@
'django_version': None,
'add_wsgi_middleware': lambda app: app,
})
- from _webapp25 import *
- from _webapp25 import __doc__
+ from google.appengine.ext.webapp._webapp25 import *
+ from google.appengine.ext.webapp._webapp25 import __doc__
diff --git a/google/appengine/ext/webapp/_template.py b/google/appengine/ext/webapp/_template.py
new file mode 100644
index 0000000..0dbc044
--- /dev/null
+++ b/google/appengine/ext/webapp/_template.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+
+
+"""A simple django-based templating framework for use by internal components.
+
+This module should NOT be used by components outside of the google.appengine
+package.
+"""
+
+
+
+
+import warnings
+warnings.filterwarnings('ignore',
+ '',
+ DeprecationWarning,
+ r'ext\.webapp\._template')
+
+import google.appengine._internal.django.template as django_template
+from google.appengine.ext.webapp import template
+
+
+def render(template_path, template_dict, debug=False):
+ """Renders the template at the given path with the given dict of values.
+
+ Example usage:
+ render("templates/index.html", {"name": "Bret", "values": [1, 2, 3]})
+
+ Args:
+ template_path: path to a Django template
+ template_dict: dictionary of values to apply to the template
+
+ Returns:
+ The rendered template as a string.
+ """
+ t = template._load_internal_django(template_path, debug)
+ return t.render(django_template.Context(template_dict))
diff --git a/google/appengine/ext/webapp/blobstore_handlers.py b/google/appengine/ext/webapp/blobstore_handlers.py
index 4664213..97891f1 100755
--- a/google/appengine/ext/webapp/blobstore_handlers.py
+++ b/google/appengine/ext/webapp/blobstore_handlers.py
@@ -329,8 +329,8 @@
class BlobstoreUploadHandler(webapp.RequestHandler):
"""Base class for creation blob upload handlers."""
- def __init__(self):
- super(BlobstoreUploadHandler, self).__init__()
+ def __init__(self, *args, **kwargs):
+ super(BlobstoreUploadHandler, self).__init__(*args, **kwargs)
self.__uploads = None
def get_uploads(self, field_name=None):
diff --git a/google/appengine/ext/webapp/template.py b/google/appengine/ext/webapp/template.py
index 24494f9..5711125 100755
--- a/google/appengine/ext/webapp/template.py
+++ b/google/appengine/ext/webapp/template.py
@@ -51,12 +51,25 @@
import logging
import os
+import warnings
-from google.appengine.ext import webapp
-webapp._config_handle.django_setup()
+if os.environ.get('APPENGINE_RUNTIME') == 'python27':
+ import google.appengine._internal.django.template.loader
+ from google.appengine._internal import django
+ warnings.warn(
+ 'google.appengine.ext.webapp.template is deprecated. Please use another '
+ 'templating system such as django.template or jinja2.',
+ DeprecationWarning,
+ stacklevel=2)
+else:
+ from google.appengine.ext import webapp
+ webapp._config_handle.django_setup()
-import django.template
-import django.template.loader
+ import django.template
+ import django.template.loader
+
+
+template_cache = {}
def render(template_path, template_dict, debug=False):
@@ -68,19 +81,16 @@
Args:
template_path: path to a Django template
template_dict: dictionary of values to apply to the template
+
+ Returns:
+ The rendered template as a string.
"""
t = load(template_path, debug)
return t.render(Context(template_dict))
-template_cache = {}
-def load(path, debug=False):
- """Loads the Django template from the given path.
-
- It is better to use this function than to construct a Template using the
- class below because Django requires you to load the template with a method
- if you want imports and extends to work in the template.
- """
+def _load_user_django(path, debug):
+ """Load the given template using the django found in third_party."""
abspath = os.path.abspath(path)
if not debug:
@@ -122,6 +132,59 @@
return template
+def _load_internal_django(path, debug):
+ """Load the given template using the django found in apphosting._internal."""
+ import google.appengine._internal.django.conf
+ import google.appengine._internal.django.template.loader
+ from google.appengine._internal import django
+
+ abspath = os.path.abspath(path)
+
+ if not debug:
+ template = template_cache.get(abspath, None)
+ else:
+ template = None
+
+ if not template:
+ directory, file_name = os.path.split(abspath)
+ settings = dict(
+ TEMPLATE_LOADERS=(
+ 'google.appengine._internal.'
+ 'django.template.loaders.filesystem.load_template_source',
+ ),
+ TEMPLATE_DIRS=(directory,),
+ TEMPLATE_DEBUG=debug,
+ DEBUG=debug)
+
+ django.conf.settings.configure(**settings)
+ template = django.template.loader.get_template(file_name)
+
+ if not debug:
+ template_cache[abspath] = template
+
+ def wrap_render(context, orig_render=template.render):
+
+
+ django.conf.settings.configure(**settings)
+ return orig_render(context)
+ template.render = wrap_render
+
+ return template
+
+
+def load(path, debug=False):
+ """Loads the Django template from the given path.
+
+ It is better to use this function than to construct a Template using the
+ class below because Django requires you to load the template with a method
+ if you want imports and extends to work in the template.
+ """
+ if os.environ.get('APPENGINE_RUNTIME') == 'python27':
+ return _load_internal_django(path, debug)
+ else:
+ return _load_user_django(path, debug)
+
+
def _swap_settings(new):
"""Swap in selected Django settings, returning old settings.
diff --git a/google/appengine/tools/appcfg.py b/google/appengine/tools/appcfg.py
index ad81e19..8825e76 100755
--- a/google/appengine/tools/appcfg.py
+++ b/google/appengine/tools/appcfg.py
@@ -111,6 +111,8 @@
DAY = 24*3600
SUNDAY = 6
+SUPPORTED_RUNTIMES = ('go', 'python', 'python27')
+
def PrintUpdate(msg):
"""Print a message to stderr.
@@ -134,6 +136,18 @@
PrintUpdate(msg)
+def _PrintErrorAndExit(stream, msg, exit_code=2):
+ """Prints the given error message and exits the program.
+
+ Args:
+ stream: The stream (e.g. StringIO or file) to write the message to.
+ msg: The error message to display as a string.
+ exit_code: The integer code to pass to sys.exit().
+ """
+ stream.write(msg)
+ sys.exit(exit_code)
+
+
def GetMimeTypeIfStaticFile(config, filename):
"""Looks up the mime type for 'filename'.
@@ -2213,7 +2227,8 @@
throttle_class=None,
opener=open,
file_iterator=FileIterator,
- time_func=time.time):
+ time_func=time.time,
+ wrap_server_error_message=True):
"""Initializer. Parses the cmdline and selects the Action to use.
Initializes all of the attributes described in the class docstring.
@@ -2237,6 +2252,10 @@
regular expression.
time_func: Function which provides the current time (can be replaced for
testing).
+ wrap_server_error_message: If true, the error messages from
+ urllib2.HTTPError exceptions in Run() are wrapped with
+ '--- begin server output ---' and '--- end server output ---',
+ otherwise the error message is printed as is.
"""
self.parser_class = parser_class
self.argv = argv
@@ -2248,6 +2267,7 @@
self.update_check_class = update_check_class
self.throttle_class = throttle_class
self.time_func = time_func
+ self.wrap_server_error_message = wrap_server_error_message
@@ -2264,7 +2284,14 @@
self._PrintHelpAndExit()
if not self.options.allow_any_runtime:
- appinfo.AppInfoExternal.ATTRIBUTES[appinfo.RUNTIME] = 'python|go'
+ if self.options.runtime:
+ if self.options.runtime not in SUPPORTED_RUNTIMES:
+ _PrintErrorAndExit(self.error_fh,
+ '"%s" is not a supported runtime\n' %
+ self.options.runtime)
+ else:
+ appinfo.AppInfoExternal.ATTRIBUTES[appinfo.RUNTIME] = (
+ '|'.join(SUPPORTED_RUNTIMES))
action = self.args.pop(0)
@@ -2357,9 +2384,13 @@
self.action(self)
except urllib2.HTTPError, e:
body = e.read()
- print >>self.error_fh, ('Error %d: --- begin server output ---\n'
- '%s\n--- end server output ---' %
- (e.code, body.rstrip('\n')))
+ if self.wrap_server_error_message:
+ error_format = ('Error %d: --- begin server output ---\n'
+ '%s\n--- end server output ---')
+ else:
+ error_format = 'Error %d: %s'
+
+ print >>self.error_fh, (error_format % (e.code, body.rstrip('\n')))
return 1
except yaml_errors.EventListenerError, e:
print >>self.error_fh, ('Error parsing yaml file:\n%s' % e)
@@ -2438,6 +2469,8 @@
help='Override application from app.yaml file.')
parser.add_option('-V', '--version', action='store', dest='version',
help='Override (major) version from app.yaml file.')
+ parser.add_option('-r', '--runtime', action='store', dest='runtime',
+ help='Override runtime from app.yaml file.')
parser.add_option('-R', '--allow_any_runtime', action='store_true',
dest='allow_any_runtime', default=False,
help='Do not validate the runtime in app.yaml')
@@ -2541,10 +2574,15 @@
if not os.path.isdir(basepath):
self.parser.error('Not a directory: %s' % basepath)
- for yaml_file in (file_name + '.yaml', file_name + '.yml'):
- yaml_path = os.path.join(basepath, yaml_file)
- if os.path.isfile(yaml_path):
- return yaml_path
+
+
+ alt_basepath = os.path.join(basepath, "WEB-INF", "appengine-generated")
+
+ for yaml_basepath in (basepath, alt_basepath):
+ for yaml_file in (file_name + '.yaml', file_name + '.yml'):
+ yaml_path = os.path.join(yaml_basepath, yaml_file)
+ if os.path.isfile(yaml_path):
+ return yaml_path
return None
@@ -2577,6 +2615,9 @@
appyaml.application = self.options.app_id
if self.options.version:
appyaml.version = self.options.version
+ if self.options.runtime:
+ appyaml.runtime = self.options.runtime
+
msg = 'Application: %s' % appyaml.application
if appyaml.application != orig_application:
msg += ' (was: %s)' % orig_application
diff --git a/google/appengine/tools/bulkloader.py b/google/appengine/tools/bulkloader.py
index f5ced2f..79d6129 100755
--- a/google/appengine/tools/bulkloader.py
+++ b/google/appengine/tools/bulkloader.py
@@ -1346,6 +1346,8 @@
kind: The string name of a kind.
high_id: The int value to which to increment the unique id counter.
"""
+ if self.dry_run:
+ return
high_id_key = datastore.Key.from_path(*(ancestor_path + [kind, high_id]))
IncrementId(high_id_key)
diff --git a/google/appengine/tools/dev-channel-js.js b/google/appengine/tools/dev-channel-js.js
index ea7f7da..a206700 100755
--- a/google/appengine/tools/dev-channel-js.js
+++ b/google/appengine/tools/dev-channel-js.js
@@ -1835,6 +1835,9 @@
goog.dom.isNodeLike = function(obj) {
return goog.isObject(obj) && obj.nodeType > 0
};
+goog.dom.isElement = function(obj) {
+ return goog.isObject(obj) && obj.nodeType == goog.dom.NodeType.ELEMENT
+};
goog.dom.isWindow = function(obj) {
return goog.isObject(obj) && obj.window == obj
};
@@ -1990,7 +1993,7 @@
return!1
};
goog.dom.setFocusableTabIndex = function(element, enable) {
- enable ? element.tabIndex = 0 : element.removeAttribute("tabIndex")
+ enable ? element.tabIndex = 0 : (element.tabIndex = -1, element.removeAttribute("tabIndex"))
};
goog.dom.getTextContent = function(node) {
var textContent;
@@ -2188,18 +2191,30 @@
goog.debug.EntryPointMonitor = function() {
};
goog.debug.entryPointRegistry.refList_ = [];
+goog.debug.entryPointRegistry.monitors_ = [];
+goog.debug.entryPointRegistry.monitorsMayExist_ = !1;
goog.debug.entryPointRegistry.register = function(callback) {
- goog.debug.entryPointRegistry.refList_[goog.debug.entryPointRegistry.refList_.length] = callback
+ goog.debug.entryPointRegistry.refList_[goog.debug.entryPointRegistry.refList_.length] = callback;
+ if(goog.debug.entryPointRegistry.monitorsMayExist_) {
+ for(var monitors = goog.debug.entryPointRegistry.monitors_, i = 0;i < monitors.length;i++) {
+ callback(goog.bind(monitors[i].wrap, monitors[i]))
+ }
+ }
};
goog.debug.entryPointRegistry.monitorAll = function(monitor) {
+ goog.debug.entryPointRegistry.monitorsMayExist_ = !0;
for(var transformer = goog.bind(monitor.wrap, monitor), i = 0;i < goog.debug.entryPointRegistry.refList_.length;i++) {
goog.debug.entryPointRegistry.refList_[i](transformer)
}
+ goog.debug.entryPointRegistry.monitors_.push(monitor)
};
goog.debug.entryPointRegistry.unmonitorAllIfPossible = function(monitor) {
+ var monitors = goog.debug.entryPointRegistry.monitors_;
+ goog.asserts.assert(monitor == monitors[monitors.length - 1], "Only the most recent monitor can be unwrapped.");
for(var transformer = goog.bind(monitor.unwrap, monitor), i = 0;i < goog.debug.entryPointRegistry.refList_.length;i++) {
goog.debug.entryPointRegistry.refList_[i](transformer)
}
+ monitors.length--
};
goog.debug.errorHandlerWeakDep = {protectEntryPoint:function(fn) {
return fn
@@ -2812,11 +2827,11 @@
return this.map_.__iterator__(!1)
};
goog.debug.catchErrors = function(logFunc, opt_cancel, opt_target) {
- var target = opt_target || goog.global, oldErrorHandler = target.onerror;
+ var target = opt_target || goog.global, oldErrorHandler = target.onerror, retVal = goog.userAgent.WEBKIT ? !opt_cancel : !!opt_cancel;
target.onerror = function(message, url, line) {
oldErrorHandler && oldErrorHandler(message, url, line);
logFunc({message:message, fileName:url, line:line});
- return Boolean(opt_cancel)
+ return retVal
}
};
goog.debug.expose = function(obj, opt_showFn) {
@@ -3298,7 +3313,11 @@
goog.reflect.object = function(type, object) {
return object
};
-goog.reflect.sinkValue = new Function("a", "return a");
+goog.reflect.sinkValue = function(x) {
+ goog.reflect.sinkValue[" "](x);
+ return x
+};
+goog.reflect.sinkValue[" "] = goog.nullFunction;
goog.reflect.canAccessProperty = function(obj, prop) {
try {
return goog.reflect.sinkValue(obj[prop]), !0
diff --git a/google/appengine/tools/dev_appserver.py b/google/appengine/tools/dev_appserver.py
index 181952c..6270569 100755
--- a/google/appengine/tools/dev_appserver.py
+++ b/google/appengine/tools/dev_appserver.py
@@ -387,7 +387,7 @@
as equal.
"""
results = []
- for attribute in self.ATTRUBUTES:
+ for attribute in self.ATTRIBUTES:
results.append('%s: %s' % (attributes, getattr(self, attributes)))
return '<AppServerRequest %s>' % ' '.join(results)
@@ -2981,11 +2981,11 @@
old_env = os.environ.copy()
old_cwd = os.getcwd()
old_file_type = types.FileType
+ old_path = sys.path[:]
reset_modules = False
try:
ClearAllButEncodingsModules(sys.modules)
- before_path = sys.path[:]
sys.modules.update(module_dict)
sys.argv = [cgi_path]
@@ -3047,7 +3047,7 @@
- sys.path[:] = before_path
+ sys.path[:] = old_path
os.environ.clear()
os.environ.update(old_env)
diff --git a/google/appengine/tools/dev_appserver_blobstore.py b/google/appengine/tools/dev_appserver_blobstore.py
index 8437520..f963c14 100755
--- a/google/appengine/tools/dev_appserver_blobstore.py
+++ b/google/appengine/tools/dev_appserver_blobstore.py
@@ -59,6 +59,19 @@
AUTO_MIME_TYPE = 'application/vnd.google.appengine.auto'
+ERROR_RESPONSE_TEMPLATE = """
+<html>
+ <head>
+ <title>%(response_code)d %(response_string)s</title>
+ </head>
+ <body text=#000000 bgcolor=#ffffff>
+ <h1>Error: %(response_string)s</h1>
+ <h2>%(response_text)s</h2>
+ </body>
+</html>
+"""
+
+
def GetBlobStorage():
"""Get blob-storage from api-proxy stub map.
@@ -314,6 +327,8 @@
if upload_session:
success_path = upload_session['success_path']
+ max_bytes_per_blob = upload_session['max_bytes_per_blob']
+ max_bytes_total = upload_session['max_bytes_total']
upload_form = cgi.FieldStorage(fp=request.infile,
headers=request.headers,
@@ -323,7 +338,10 @@
mime_message_string = self.__cgi_handler.GenerateMIMEMessageString(
- upload_form)
+ upload_form,
+ max_bytes_per_blob=max_bytes_per_blob,
+ max_bytes_total=max_bytes_total)
+
datastore.Delete(upload_session)
self.current_session = upload_session
@@ -347,35 +365,39 @@
force_admin=True)
except dev_appserver_upload.InvalidMIMETypeFormatError:
outfile.write('Status: 400\n\n')
+ except dev_appserver_upload.UploadEntityTooLargeError:
+ outfile.write('Status: 413\n\n')
+ response = ERROR_RESPONSE_TEMPLATE % {
+ 'response_code': 413,
+ 'response_string': 'Request Entity Too Large',
+ 'response_text': 'Your client issued a request that was too '
+ 'large.'}
+ outfile.write(response)
else:
logging.error('Could not find session for %s', upload_key)
outfile.write('Status: 404\n\n')
- def EndRedirect(self, redirected_outfile, original_outfile):
+ def EndRedirect(self, dispatched_output, original_output):
"""Handle the end of upload complete notification.
Makes sure the application upload handler returned an appropriate status
code.
"""
- response = dev_appserver.RewriteResponse(redirected_outfile)
+ response = dev_appserver.RewriteResponse(dispatched_output)
logging.info('Upload handler returned %d', response.status_code)
+ outfile = cStringIO.StringIO()
+ outfile.write('Status: %s\n' % response.status_code)
- if (response.status_code in (301, 302, 303) and
- (not response.body or len(response.body.read()) == 0)):
- contentless_outfile = cStringIO.StringIO()
-
-
- contentless_outfile.write('Status: %s\n' % response.status_code)
- contentless_outfile.write(''.join(response.headers.headers))
- contentless_outfile.seek(0)
- dev_appserver.URLDispatcher.EndRedirect(self,
- contentless_outfile,
- original_outfile)
+ if response.body and len(response.body.read()) > 0:
+ response.body.seek(0)
+ outfile.write(response.body.read())
else:
- logging.error(
- 'Invalid upload handler response. Only 301, 302 and 303 '
- 'statuses are permitted and it may not have a content body.')
- original_outfile.write('Status: 500\n\n')
+ outfile.write(''.join(response.headers.headers))
+
+ outfile.seek(0)
+ dev_appserver.URLDispatcher.EndRedirect(self,
+ outfile,
+ original_output)
return UploadDispatcher()
diff --git a/google/appengine/tools/dev_appserver_main.py b/google/appengine/tools/dev_appserver_main.py
index 4c4b26e..7fc120e 100755
--- a/google/appengine/tools/dev_appserver_main.py
+++ b/google/appengine/tools/dev_appserver_main.py
@@ -367,17 +367,20 @@
print >>sys.stderr, 'Invalid value supplied for port'
PrintUsageExit(1)
+ def expand_path(s):
+ return os.path.abspath(os.path.expanduser(s))
+
if option in ('-a', '--address'):
option_dict[ARG_ADDRESS] = value
if option == '--blobstore_path':
- option_dict[ARG_BLOBSTORE_PATH] = os.path.abspath(value)
+ option_dict[ARG_BLOBSTORE_PATH] = expand_path(value)
if option == '--datastore_path':
- option_dict[ARG_DATASTORE_PATH] = os.path.abspath(value)
+ option_dict[ARG_DATASTORE_PATH] = expand_path(value)
if option == '--prospective_search_path':
- option_dict[ARG_PROSPECTIVE_SEARCH_PATH] = os.path.abspath(value)
+ option_dict[ARG_PROSPECTIVE_SEARCH_PATH] = expand_path(value)
if option == '--skip_sdk_update_check':
option_dict[ARG_SKIP_SDK_UPDATE_CHECK] = True
@@ -389,7 +392,7 @@
option_dict[ARG_HIGH_REPLICATION] = True
if option == '--history_path':
- option_dict[ARG_HISTORY_PATH] = os.path.abspath(value)
+ option_dict[ARG_HISTORY_PATH] = expand_path(value)
if option in ('-c', '--clear_datastore'):
option_dict[ARG_CLEAR_DATASTORE] = True
@@ -555,27 +558,6 @@
print >>sys.stderr, 'Invalid arguments'
PrintUsageExit(1)
- version_tuple = tuple(sys.version_info[:2])
-
- if ARG_MULTIPROCESS not in option_dict and WARN_ABOUT_PYTHON_VERSION:
- if version_tuple < PRODUCTION_VERSION:
- sys.stderr.write('Warning: You are using a Python runtime (%d.%d) that '
- 'is older than the production runtime environment '
- '(%d.%d). Your application may be dependent on Python '
- 'behaviors that have changed and may not work correctly '
- 'when deployed to production.\n' % (
- version_tuple[0], version_tuple[1],
- PRODUCTION_VERSION[0], PRODUCTION_VERSION[1]))
-
- if version_tuple > PRODUCTION_VERSION:
- sys.stderr.write('Warning: You are using a Python runtime (%d.%d) that '
- 'is more recent than the production runtime environment '
- '(%d.%d). Your application may use features that are not '
- 'available in the production environment and may not work '
- 'correctly when deployed to production.\n' % (
- version_tuple[0], version_tuple[1],
- PRODUCTION_VERSION[0], PRODUCTION_VERSION[1]))
-
root_path = args[0]
if '_DEFAULT_ENV_AUTH_DOMAIN' in option_dict:
@@ -604,6 +586,30 @@
logging.error('Application configuration file invalid:\n%s', e)
return 1
+ version_tuple = tuple(sys.version_info[:2])
+ expected_version = PRODUCTION_VERSION
+ if appinfo.runtime == 'python27':
+ expected_version = (2, 7)
+
+ if ARG_MULTIPROCESS not in option_dict and WARN_ABOUT_PYTHON_VERSION:
+ if version_tuple < expected_version:
+ sys.stderr.write('Warning: You are using a Python runtime (%d.%d) that '
+ 'is older than the production runtime environment '
+ '(%d.%d). Your application may be dependent on Python '
+ 'behaviors that have changed and may not work correctly '
+ 'when deployed to production.\n' % (
+ version_tuple[0], version_tuple[1],
+ expected_version[0], expected_version[1]))
+
+ if version_tuple > expected_version:
+ sys.stderr.write('Warning: You are using a Python runtime (%d.%d) that '
+ 'is more recent than the production runtime environment '
+ '(%d.%d). Your application may use features that are '
+ 'not available in the production environment and may '
+ 'not work correctly when deployed to production.\n' % (
+ version_tuple[0], version_tuple[1],
+ expected_version[0], expected_version[1]))
+
multiprocess.Init(argv, option_dict, root_path, appinfo)
dev_process = multiprocess.GlobalProcess()
port = option_dict[ARG_PORT]
diff --git a/google/appengine/tools/dev_appserver_upload.py b/google/appengine/tools/dev_appserver_upload.py
index 753dd27..ef09e19 100755
--- a/google/appengine/tools/dev_appserver_upload.py
+++ b/google/appengine/tools/dev_appserver_upload.py
@@ -68,6 +68,10 @@
"""MIME type was formatted incorrectly."""
+class UploadEntityTooLargeError(Error):
+ """Entity being uploaded exceeded the allowed size."""
+
+
def GenerateBlobKey(time_func=time.time, random_func=random.random):
"""Generate a unique BlobKey.
@@ -190,14 +194,27 @@
content_type_formatter['content-type'].decode('utf-8'))
blob_entity['creation'] = creation
blob_entity['filename'] = form_item.filename.decode('utf-8')
- form_item.file.seek(0, 2)
- size = form_item.file.tell()
+
form_item.file.seek(0)
- blob_entity['size'] = size
+ digester = hashlib.md5()
+ while True:
+ block = form_item.file.read(1 << 20)
+ if not block:
+ break
+ digester.update(block)
+
+ blob_entity['md5_hash'] = digester.hexdigest()
+ blob_entity['size'] = form_item.file.tell()
+ form_item.file.seek(0)
+
datastore.Put(blob_entity)
return blob_entity
- def _GenerateMIMEMessage(self, form, boundary=None):
+ def _GenerateMIMEMessage(self,
+ form,
+ boundary=None,
+ max_bytes_per_blob=None,
+ max_bytes_total=None):
"""Generate a new post from original form.
Also responsible for storing blobs in the datastore.
@@ -207,12 +224,20 @@
derived from original post data.
boundary: Boundary to use for resulting form. Used only in tests so
that the boundary is always consistent.
+ max_bytes_per_blob: The maximum size in bytes that any single blob
+ in the form is allowed to be.
+ max_bytes_total: The maximum size in bytes that the total of all blobs
+ in the form is allowed to be.
Returns:
A MIMEMultipart instance representing the new HTTP post which should be
forwarded to the developers actual CGI handler. DO NOT use the return
value of this method to generate a string unless you know what you're
doing and properly handle folding whitespace (from rfc822) properly.
+
+ Raises:
+ UploadEntityTooLargeError: The upload exceeds either the
+ max_bytes_per_blob or max_bytes_total limits.
"""
message = multipart.MIMEMultipart('form-data', boundary)
for name, value in form.headers.items():
@@ -245,6 +270,10 @@
yield form_item
creation = self.__now_func()
+ total_bytes_uploaded = 0
+ created_blobs = []
+ upload_too_large = False
+
for form_item in IterateForm():
@@ -272,18 +301,43 @@
main_type, sub_type = _SplitMIMEType(form_item.type)
+ form_item.file.seek(0, 2)
+ content_length = form_item.file.tell()
+ form_item.file.seek(0)
+
+ total_bytes_uploaded += content_length
+
+ if max_bytes_per_blob is not None:
+ if max_bytes_per_blob < content_length:
+ upload_too_large = True
+ break
+ if max_bytes_total is not None:
+ if max_bytes_total < total_bytes_uploaded:
+ upload_too_large = True
+ break
+
+
blob_entity = self.StoreBlob(form_item, creation)
+
+ created_blobs.append(blob_entity)
+
variable = base.MIMEBase('message',
'external-body',
access_type=blobstore.BLOB_KEY_HEADER,
blob_key=blob_entity.key().name())
- form_item.file.seek(0, 2)
- content_length = form_item.file.tell()
form_item.file.seek(0)
+ digester = hashlib.md5()
+ while True:
+ block = form_item.file.read(1 << 20)
+ if not block:
+ break
+ digester.update(block)
+ blob_key = base64.urlsafe_b64encode(digester.hexdigest())
+ form_item.file.seek(0)
external = base.MIMEBase(main_type,
sub_type,
@@ -292,6 +346,7 @@
headers['Content-Length'] = str(content_length)
headers[blobstore.UPLOAD_INFO_CREATION_HEADER] = (
blobstore._format_creation(creation))
+ headers['Content-MD5'] = blob_key
for key, value in headers.iteritems():
external.add_header(key, value)
@@ -312,9 +367,18 @@
**disposition_parameters)
message.attach(variable)
+ if upload_too_large:
+ for blob in created_blobs:
+ datastore.Delete(blob)
+ raise UploadEntityTooLargeError()
+
return message
- def GenerateMIMEMessageString(self, form, boundary=None):
+ def GenerateMIMEMessageString(self,
+ form,
+ boundary=None,
+ max_bytes_per_blob=None,
+ max_bytes_total=None):
"""Generate a new post string from original form.
Args:
@@ -322,11 +386,18 @@
derived from original post data.
boundary: Boundary to use for resulting form. Used only in tests so
that the boundary is always consistent.
+ max_bytes_per_blob: The maximum size in bytes that any single blob
+ in the form is allowed to be.
+ max_bytes_total: The maximum size in bytes that the total of all blobs
+ in the form is allowed to be.
Returns:
A string rendering of a MIMEMultipart instance.
"""
- message = self._GenerateMIMEMessage(form, boundary=boundary)
+ message = self._GenerateMIMEMessage(form,
+ boundary=boundary,
+ max_bytes_per_blob=max_bytes_per_blob,
+ max_bytes_total=max_bytes_total)
message_out = cStringIO.StringIO()
gen = generator.Generator(message_out, maxheaderlen=0)
gen.flatten(message, unixfrom=False)
diff --git a/google/net/proto/ProtocolBuffer.py b/google/net/proto/ProtocolBuffer.py
index 1146c83..4f2d661 100644
--- a/google/net/proto/ProtocolBuffer.py
+++ b/google/net/proto/ProtocolBuffer.py
@@ -26,6 +26,7 @@
import httplib
__all__ = ['ProtocolMessage', 'Encoder', 'Decoder',
+ 'ExtendableProtocolMessage',
'ProtocolBufferDecodeError',
'ProtocolBufferEncodeError',
'ProtocolBufferReturnError']
@@ -294,6 +295,30 @@
return "false"
+TYPE_DOUBLE = 1
+TYPE_FLOAT = 2
+TYPE_INT64 = 3
+TYPE_UINT64 = 4
+TYPE_INT32 = 5
+TYPE_FIXED64 = 6
+TYPE_FIXED32 = 7
+TYPE_BOOL = 8
+TYPE_STRING = 9
+TYPE_GROUP = 10
+TYPE_FOREIGN = 11
+
+
+_TYPE_TO_DEBUG_STRING = {
+ TYPE_INT32: ProtocolMessage.DebugFormatInt32,
+ TYPE_INT64: ProtocolMessage.DebugFormatInt64,
+ TYPE_UINT64: ProtocolMessage.DebugFormatInt64,
+ TYPE_FLOAT: ProtocolMessage.DebugFormatFloat,
+ TYPE_STRING: ProtocolMessage.DebugFormatString,
+ TYPE_FIXED32: ProtocolMessage.DebugFormatFixed32,
+ TYPE_FIXED64: ProtocolMessage.DebugFormatFixed64,
+ TYPE_BOOL: ProtocolMessage.DebugFormatBool }
+
+
class Encoder:
@@ -437,6 +462,23 @@
def putRawString(self, v):
self.buf.fromstring(v)
+ _TYPE_TO_METHOD = {
+ TYPE_DOUBLE: putDouble,
+ TYPE_FLOAT: putFloat,
+ TYPE_FIXED64: put64,
+ TYPE_FIXED32: put32,
+ TYPE_INT32: putVarInt32,
+ TYPE_INT64: putVarInt64,
+ TYPE_UINT64: putVarUint64,
+ TYPE_BOOL: putBoolean,
+ TYPE_STRING: putPrefixedString }
+
+ _TYPE_TO_BYTE_SIZE = {
+ TYPE_DOUBLE: 8,
+ TYPE_FLOAT: 4,
+ TYPE_FIXED64: 8,
+ TYPE_FIXED32: 4,
+ TYPE_BOOL: 1 }
class Decoder:
def __init__(self, buf, idx, limit):
@@ -597,6 +639,326 @@
self.idx = self.limit
return r.tostring()
  # Dispatch table: extension field type -> unbound Decoder read method,
  # invoked as Decoder._TYPE_TO_METHOD[t](d) with a Decoder instance (see
  # ExtendableProtocolMessage._ParseOneExtensionField).  TYPE_GROUP and
  # TYPE_FOREIGN require recursive parsing and are handled by that caller.
  _TYPE_TO_METHOD = {
      TYPE_DOUBLE: getDouble,
      TYPE_FLOAT: getFloat,
      TYPE_FIXED64: get64,
      TYPE_FIXED32: get32,
      TYPE_INT32: getVarInt32,
      TYPE_INT64: getVarInt64,
      TYPE_UINT64: getVarUint64,
      TYPE_BOOL: getBoolean,
      TYPE_STRING: getPrefixedString }
+
+
+
+
+
class ExtensionIdentifier(object):
  """Runtime handle describing one protocol-buffer extension field.

  Instances serve as the dictionary keys under which
  ExtendableProtocolMessage stores extension values; object identity is
  what matters for those lookups.
  """

  # __slots__ keeps these handles compact -- one exists per declared
  # extension for the lifetime of the process.
  __slots__ = ('full_name', 'number', 'field_type', 'wire_tag', 'is_repeated',
               'default', 'containing_cls', 'composite_cls', 'message_name')

  def __init__(self, full_name, number, field_type, wire_tag, is_repeated,
               default):
    # The first six slots come straight from the constructor arguments.
    # The remaining three (containing_cls, composite_cls, message_name)
    # are filled in later by ExtendableProtocolMessage._RegisterExtension
    # and stay unset until then.
    values = (full_name, number, field_type, wire_tag, is_repeated, default)
    for slot, value in zip(self.__slots__[:6], values):
      setattr(self, slot, value)
+
class ExtendableProtocolMessage(ProtocolMessage):
  """ProtocolMessage subclass adding support for extension fields.

  Generated subclasses are expected to provide:
    _extension_fields: dict mapping ExtensionIdentifier -> stored value
        (a list for repeated extensions).
    _extensions_by_field_number: class-level dict mapping field number ->
        ExtensionIdentifier, populated via _RegisterExtension().
  """

  def HasExtension(self, extension):
    """Returns True if a value is stored for the given extension."""
    self._VerifyExtensionIdentifier(extension)
    return extension in self._extension_fields

  def ClearExtension(self, extension):
    """Removes the stored value, if any, for the given extension."""
    self._VerifyExtensionIdentifier(extension)
    if extension in self._extension_fields:
      del self._extension_fields[extension]

  def GetExtension(self, extension, index=None):
    """Returns the value of the given extension.

    Args:
      extension: the ExtensionIdentifier to read.
      index: element index; required when the extension is repeated.

    Returns:
      The stored value.  For an unset singular extension this is the
      declared default (a fresh, unattached message for composite types).
      Indexing an unset repeated extension raises IndexError.
    """
    self._VerifyExtensionIdentifier(extension)
    if extension in self._extension_fields:
      result = self._extension_fields[extension]
    else:
      if extension.is_repeated:
        result = []
      elif extension.composite_cls:
        result = extension.composite_cls()
      else:
        result = extension.default
    if extension.is_repeated:
      result = result[index]
    return result

  def SetExtension(self, extension, *args):
    """Sets a scalar extension value.

    Call as SetExtension(extension, value) for a singular extension, or
    SetExtension(extension, index, value) for a repeated one.  Composite
    (message/group) extensions must be modified through
    MutableExtension() or AddExtension() instead.

    Raises:
      TypeError: if the extension is composite, or the wrong number of
          arguments was supplied.
    """
    self._VerifyExtensionIdentifier(extension)
    if extension.composite_cls:
      raise TypeError(
          'Cannot assign to extension "%s" because it is a composite type.' %
          extension.full_name)
    if extension.is_repeated:
      if len(args) != 2:
        # Fix: report a count that, like the "exactly 3", includes the
        # extension argument itself.
        raise TypeError(
            'SetExtension(extension, index, value) for repeated extension '
            'takes exactly 3 arguments: (%d given)' % (len(args) + 1))
      index, value = args
      self._extension_fields[extension][index] = value
    else:
      if len(args) != 1:
        # Fix: the singular form takes 2 arguments, not 3 (copy-paste
        # error from the repeated branch).
        raise TypeError(
            'SetExtension(extension, value) for singular extension '
            'takes exactly 2 arguments: (%d given)' % (len(args) + 1))
      self._extension_fields[extension] = args[0]

  def MutableExtension(self, extension, index=None):
    """Returns the mutable message stored for a composite extension.

    A singular extension's message is created and stored on first access.
    For a repeated extension, index is required and must refer to an
    existing element (use AddExtension() to append new elements).

    Raises:
      TypeError: if the extension is not composite, or index is missing
          for a repeated extension.
    """
    self._VerifyExtensionIdentifier(extension)
    if extension.composite_cls is None:
      raise TypeError(
          'MutableExtension() cannot be applied to "%s", because it is not a '
          'composite type.' % extension.full_name)
    if extension.is_repeated:
      if index is None:
        raise TypeError(
            'MutableExtension(extension, index) for repeated extension '
            'takes exactly 2 arguments: (1 given)')
      return self.GetExtension(extension, index)
    if extension in self._extension_fields:
      return self._extension_fields[extension]
    result = extension.composite_cls()
    self._extension_fields[extension] = result
    return result

  def ExtensionList(self, extension):
    """Returns the (mutable) list backing a repeated extension,
    creating and storing an empty one if the extension is unset.

    Raises:
      TypeError: if the extension is not repeated.
    """
    self._VerifyExtensionIdentifier(extension)
    if not extension.is_repeated:
      raise TypeError(
          'ExtensionList() cannot be applied to "%s", because it is not a '
          'repeated extension.' % extension.full_name)
    if extension in self._extension_fields:
      return self._extension_fields[extension]
    result = []
    self._extension_fields[extension] = result
    return result

  def ExtensionSize(self, extension):
    """Returns the number of elements in a repeated extension (0 if unset).

    Raises:
      TypeError: if the extension is not repeated.
    """
    self._VerifyExtensionIdentifier(extension)
    if not extension.is_repeated:
      raise TypeError(
          'ExtensionSize() cannot be applied to "%s", because it is not a '
          'repeated extension.' % extension.full_name)
    if extension in self._extension_fields:
      return len(self._extension_fields[extension])
    return 0

  def AddExtension(self, extension, value=None):
    """Appends an element to a repeated extension.

    For composite extensions, appends and returns a new empty message
    (value must not be given).  For scalar extensions, appends value.

    Raises:
      TypeError: if the extension is not repeated, or a value was passed
          for a composite extension.
    """
    self._VerifyExtensionIdentifier(extension)
    if not extension.is_repeated:
      raise TypeError(
          'AddExtension() cannot be applied to "%s", because it is not a '
          'repeated extension.' % extension.full_name)
    field = self._extension_fields.setdefault(extension, [])

    if extension.composite_cls:
      if value is not None:
        raise TypeError(
            'value must not be set in AddExtension() for "%s", because it is '
            'a message type extension. Set values on the returned message '
            'instead.' % extension.full_name)
      msg = extension.composite_cls()
      field.append(msg)
      return msg

    field.append(value)

  def _VerifyExtensionIdentifier(self, extension):
    """Raises TypeError unless `extension` extends this message class."""
    if extension.containing_cls != self.__class__:
      raise TypeError("Containing type of %s is %s, but not %s."
                      % (extension.full_name,
                         extension.containing_cls.__name__,
                         self.__class__.__name__))

  def _MergeExtensionFields(self, x):
    """Merges all extension values set on message x into self."""
    for ext, val in x._extension_fields.items():
      if ext.is_repeated:
        for element in val:
          if ext.composite_cls is None:
            self.AddExtension(ext, element)
          else:
            self.AddExtension(ext).MergeFrom(element)
      else:
        if ext.composite_cls is None:
          self.SetExtension(ext, val)
        else:
          self.MutableExtension(ext).MergeFrom(val)

  def _ListExtensions(self):
    """Returns the set extensions (skipping empty repeated ones),
    sorted by field number."""
    result = [ext for ext in self._extension_fields.keys()
              if (not ext.is_repeated) or self.ExtensionSize(ext) > 0]
    result.sort(key=lambda item: item.number)
    return result

  def _ExtensionEquals(self, x):
    """Returns True if self and x hold equal extension values."""
    extensions = self._ListExtensions()
    if extensions != x._ListExtensions():
      return False
    for ext in extensions:
      if ext.is_repeated:
        if self.ExtensionSize(ext) != x.ExtensionSize(ext): return False
        for e1, e2 in zip(self.ExtensionList(ext),
                          x.ExtensionList(ext)):
          if e1 != e2: return False
      else:
        if self.GetExtension(ext) != x.GetExtension(ext): return False
    return True

  def _OutputExtensionFields(self, out, partial, extensions, start_index,
                             end_field_number):
    """Serializes extensions with field number < end_field_number.

    Args:
      out: Encoder to write to.
      partial: if True, use the *Partial serialization variants (required
          fields may be unset).
      extensions: sorted list of ExtensionIdentifiers (see _ListExtensions).
      start_index: index in `extensions` at which to resume output.
      end_field_number: stop before emitting this field number, so that
          non-extension fields can be interleaved in field-number order.

    Returns:
      The index of the first extension NOT written.
    """
    def OutputSingleField(ext, value):
      out.putVarInt32(ext.wire_tag)
      if ext.field_type == TYPE_GROUP:
        if partial:
          value.OutputPartial(out)
        else:
          value.OutputUnchecked(out)
        # The end-group tag is the start-group tag + 1 (wire type 3 -> 4).
        # Fix: the original referenced an undefined local `wire_tag`,
        # raising NameError for any group-typed extension.
        out.putVarInt32(ext.wire_tag + 1)
      elif ext.field_type == TYPE_FOREIGN:
        if partial:
          out.putVarInt32(value.ByteSizePartial())
          value.OutputPartial(out)
        else:
          out.putVarInt32(value.ByteSize())
          value.OutputUnchecked(out)
      else:
        Encoder._TYPE_TO_METHOD[ext.field_type](out, value)

    size = len(extensions)
    for ext_index in xrange(start_index, size):
      ext = extensions[ext_index]
      if ext.number >= end_field_number:
        # Hand control back so the caller can emit its own fields first.
        return ext_index
      if ext.is_repeated:
        for element in self._extension_fields[ext]:
          OutputSingleField(ext, element)
      else:
        OutputSingleField(ext, self._extension_fields[ext])
    return size

  def _ParseOneExtensionField(self, wire_tag, d):
    """Parses one extension field (already-read wire_tag) from Decoder d.

    Unknown field numbers are skipped; a known number with an unexpected
    wire type is ignored without consuming its payload (NOTE(review):
    mirrors the original behavior -- confirm the caller tolerates this).
    """
    number = wire_tag >> 3
    if number in self._extensions_by_field_number:
      ext = self._extensions_by_field_number[number]
      if wire_tag != ext.wire_tag:
        return
      if ext.field_type == TYPE_FOREIGN:
        length = d.getVarInt32()
        tmp = Decoder(d.buffer(), d.pos(), d.pos() + length)
        if ext.is_repeated:
          self.AddExtension(ext).TryMerge(tmp)
        else:
          self.MutableExtension(ext).TryMerge(tmp)
        d.skip(length)
      elif ext.field_type == TYPE_GROUP:
        if ext.is_repeated:
          self.AddExtension(ext).TryMerge(d)
        else:
          self.MutableExtension(ext).TryMerge(d)
      else:
        value = Decoder._TYPE_TO_METHOD[ext.field_type](d)
        if ext.is_repeated:
          self.AddExtension(ext, value)
        else:
          self.SetExtension(ext, value)
    else:
      # Not an extension we know about: skip its payload.
      d.skipData(wire_tag)

  def _ExtensionByteSize(self, partial):
    """Returns the serialized size in bytes of all set extensions."""
    size = 0
    for extension, value in self._extension_fields.items():
      ftype = extension.field_type
      tag_size = self.lengthVarInt64(extension.wire_tag)
      if ftype == TYPE_GROUP:
        # Groups are bracketed by both a start and an end tag.
        tag_size *= 2
      if extension.is_repeated:
        size += tag_size * len(value)
        for single_value in value:
          size += self._FieldByteSize(ftype, single_value, partial)
      else:
        size += tag_size + self._FieldByteSize(ftype, value, partial)
    return size

  def _FieldByteSize(self, ftype, value, partial):
    """Returns the payload size in bytes of one value (tag excluded)."""
    size = 0
    if ftype == TYPE_STRING:
      size = self.lengthString(len(value))
    elif ftype == TYPE_FOREIGN or ftype == TYPE_GROUP:
      if partial:
        size = self.lengthString(value.ByteSizePartial())
      else:
        size = self.lengthString(value.ByteSize())
    elif ftype == TYPE_INT64 or ftype == TYPE_UINT64 or ftype == TYPE_INT32:
      size = self.lengthVarInt64(value)
    else:
      if ftype in Encoder._TYPE_TO_BYTE_SIZE:
        size = Encoder._TYPE_TO_BYTE_SIZE[ftype]
      else:
        raise AssertionError(
            'Extension type %d is not recognized.' % ftype)
    return size

  def _FormatScalarExtension(self, extension, value):
    """Formats one scalar extension value for debug output."""
    if extension.field_type in _TYPE_TO_DEBUG_STRING:
      return _TYPE_TO_DEBUG_STRING[extension.field_type](self, value)
    return self.DebugFormat(value)

  def _ExtensionDebugString(self, prefix, printElemNumber):
    """Renders all set extensions for DebugString()-style output."""
    res = ''
    extensions = self._ListExtensions()
    for extension in extensions:
      value = self._extension_fields[extension]
      if extension.is_repeated:
        cnt = 0
        for e in value:
          elm = ""
          if printElemNumber: elm = "(%d)" % cnt
          if extension.composite_cls is not None:
            res += prefix + "[%s%s] {\n" % (extension.full_name, elm)
            res += e.__str__(prefix + "  ", printElemNumber)
            res += prefix + "}\n"
          else:
            # Fix: repeated scalar extensions previously produced no
            # debug output at all.
            res += prefix + "[%s%s]: %s\n" % (
                extension.full_name, elm,
                self._FormatScalarExtension(extension, e))
          # Fix: cnt was never incremented, so every element was
          # numbered "(0)".
          cnt += 1
      else:
        if extension.composite_cls is not None:
          res += prefix + "[%s] {\n" % extension.full_name
          res += value.__str__(
              prefix + "  ", printElemNumber)
          res += prefix + "}\n"
        else:
          res += prefix + "[%s]: %s\n" % (
              extension.full_name,
              self._FormatScalarExtension(extension, value))
    return res

  @staticmethod
  def _RegisterExtension(cls, extension, composite_cls=None):
    """Registers `extension` as extending message class `cls`.

    Called from generated modules at import time.  Raises AssertionError
    if two extensions claim the same field number on cls.
    """
    extension.containing_cls = cls
    extension.composite_cls = composite_cls
    if composite_cls is not None:
      extension.message_name = composite_cls._PROTO_DESCRIPTOR_NAME
    actual_handle = cls._extensions_by_field_number.setdefault(
        extension.number, extension)
    if actual_handle is not extension:
      # Fix: the original message was missing the space between "with"
      # and "field".
      raise AssertionError(
          'Extensions "%s" and "%s" both try to extend message type "%s" '
          'with field number %d.' %
          (extension.full_name, actual_handle.full_name,
           cls.__name__, extension.number))
class ProtocolBufferDecodeError(Exception):
  """Raised when a serialized protocol buffer cannot be decoded."""
  pass
class ProtocolBufferEncodeError(Exception):
  """Raised when a protocol buffer cannot be encoded."""
  pass
diff --git a/google/net/proto2/python/internal/python_message.py b/google/net/proto2/python/internal/python_message.py
index 195cbb4..b4c5f24 100755
--- a/google/net/proto2/python/internal/python_message.py
+++ b/google/net/proto2/python/internal/python_message.py
@@ -451,6 +451,11 @@
proto_field_name = field.name
property_name = _PropertyName(proto_field_name)
+
+
+
+
+
message_type = field.message_type
def getter(self):
diff --git a/google/net/proto2/python/public/descriptor.py b/google/net/proto2/python/public/descriptor.py
index e0975ad..5a1ee48 100755
--- a/google/net/proto2/python/public/descriptor.py
+++ b/google/net/proto2/python/public/descriptor.py
@@ -35,6 +35,10 @@
"""Base error for this module."""
class TypeTransformationError(Error):
  """Error transforming between python proto type and corresponding C++ type.

  Raised by FieldDescriptor.ProtoTypeToCppProtoType() for proto types
  with no entry in _PYTHON_TO_CPP_PROTO_TYPE_MAP.
  """
+
+
class DescriptorBase(object):
"""Descriptors base class.
@@ -365,6 +369,17 @@
CPPTYPE_MESSAGE = 10
MAX_CPPTYPE = 10
  # Maps FieldDescriptor.TYPE_* values to their CPPTYPE_* counterparts.
  # NOTE(review): the map is only partial (e.g. no TYPE_UINT32 / TYPE_FIXED*
  # / TYPE_BYTES entries); ProtoTypeToCppProtoType() raises
  # TypeTransformationError for any type missing here -- confirm the
  # omissions are intentional before relying on broader coverage.
  _PYTHON_TO_CPP_PROTO_TYPE_MAP = {
      TYPE_DOUBLE: CPPTYPE_DOUBLE,
      TYPE_FLOAT: CPPTYPE_FLOAT,
      TYPE_ENUM: CPPTYPE_ENUM,
      TYPE_INT64: CPPTYPE_INT64,
      TYPE_INT32: CPPTYPE_INT32,
      TYPE_STRING: CPPTYPE_STRING,
      TYPE_BOOL: CPPTYPE_BOOL,
      TYPE_MESSAGE: CPPTYPE_MESSAGE
      }
+
@@ -408,6 +423,26 @@
else:
self._cdescriptor = None
+ @staticmethod
+ def ProtoTypeToCppProtoType(proto_type):
+ """Converts from a Python proto type to a C++ Proto Type.
+
+ The Python ProtocolBuffer classes specify both the 'Python' datatype and the
+ 'C++' datatype - and they're not the same. This helper method should
+ translate from one to another.
+
+ Args:
+ proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*)
+ Returns:
+ descriptor.FieldDescriptor.CPPTYPE_*, the C++ type.
+ Raises:
+ TypeTransformationError: when the Python proto type isn't known.
+ """
+ try:
+ return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type]
+ except KeyError:
+ raise TypeTransformationError('Unknown proto_type: %s' % proto_type)
+
class EnumDescriptor(_NestedDescriptorBase):
@@ -603,3 +638,31 @@
"""
message.ParseFromString(string)
return message
+
+
def MakeDescriptor(desc_proto, package=''):
  """Make a protobuf Descriptor given a DescriptorProto protobuf.

  Args:
    desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
    package: Optional package name for the new message Descriptor (string).

  Returns:
    A Descriptor for protobuf messages.

  Raises:
    TypeTransformationError: if a field uses a proto type with no known
        C++ equivalent (via ProtoTypeToCppProtoType).
  """
  full_message_name = [desc_proto.name]
  if package:
    full_message_name.insert(0, package)
  fields = []
  for index, field_proto in enumerate(desc_proto.field):
    full_name = '.'.join(full_message_name + [field_proto.name])
    # Fix: the field's index must be its declaration position, not
    # `field_proto.number - 1` -- field numbers need not start at 1 or be
    # contiguous.  Identical for the common 1..N numbering.
    field = FieldDescriptor(
        field_proto.name, full_name, index,
        field_proto.number, field_proto.type,
        FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
        field_proto.label, None, None, None, None, False, None,
        has_default_value=False)
    fields.append(field)

  desc_name = '.'.join(full_message_name)
  return Descriptor(desc_proto.name, desc_name, None, None, fields,
                    [], [], [])
diff --git a/google/net/proto2/python/public/reflection.py b/google/net/proto2/python/public/reflection.py
index 976e51d..75e38fd 100755
--- a/google/net/proto2/python/public/reflection.py
+++ b/google/net/proto2/python/public/reflection.py
@@ -39,6 +39,8 @@
from google.net.proto2.python.internal import api_implementation
from google.net.proto2.python.public import descriptor as descriptor_mod
+from google.net.proto2.python.public import message
+
_FieldDescriptor = descriptor_mod.FieldDescriptor
@@ -129,3 +131,23 @@
_InitMessage(descriptor, cls)
superclass = super(GeneratedProtocolMessageType, cls)
superclass.__init__(name, bases, dictionary)
+
+
def ParseMessage(descriptor, byte_str):
  """Generate a new Message instance from this Descriptor and a byte string.

  Args:
    descriptor: Protobuf Descriptor object
    byte_str: Serialized protocol buffer byte string

  Returns:
    Newly created protobuf Message object.
  """

  # Defining this throwaway class is what does the work: the
  # GeneratedProtocolMessageType metaclass runs at class-creation time and
  # wires full message behavior from DESCRIPTOR onto the class.
  class _ResultClass(message.Message):
    __metaclass__ = GeneratedProtocolMessageType
    DESCRIPTOR = descriptor

  # NOTE(review): a fresh class is synthesized on every call -- presumably
  # acceptable for infrequent use; confirm before calling in a hot path.
  new_msg = _ResultClass()
  new_msg.ParseFromString(byte_str)
  return new_msg
diff --git a/google/storage/speckle/proto/client_error_code_pb2.py b/google/storage/speckle/proto/client_error_code_pb2.py
new file mode 100644
index 0000000..2cd328d
--- /dev/null
+++ b/google/storage/speckle/proto/client_error_code_pb2.py
@@ -0,0 +1,195 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+from google.net.proto2.python.public import descriptor
+from google.net.proto2.python.public import message
+from google.net.proto2.python.public import reflection
+from google.net.proto2.proto import descriptor_pb2
+
+
+
+
+
# Machine-generated protocol buffer module (protoc output for
# storage/speckle/proto/client_error_code.proto) -- do not edit by hand.
# serialized_pb is the serialized FileDescriptorProto for the .proto file.
DESCRIPTOR = descriptor.FileDescriptor(
  name='storage/speckle/proto/client_error_code.proto',
  package='speckle.sql',
  serialized_pb='\n-storage/speckle/proto/client_error_code.proto\x12\x0bspeckle.sql\"\xf1\x06\n\x15SqlServiceClientError\"\xd7\x06\n\x0f\x43lientErrorCode\x12\x06\n\x02OK\x10\x00\x12\x13\n\x0fTRANSIENT_ERROR\x10\x01\x12\x12\n\x0eINTERNAL_ERROR\x10\x02\x12\x13\n\x0fINVALID_REQUEST\x10\x03\x12\x16\n\x12\x44\x45PRECATED_TIMEOUT\x10\x04\x12\x1d\n\x19\x44\x45PRECATED_NOT_AUTHORIZED\x10\x05\x12\x1a\n\x16\x44\x45PRECATED_RDBMS_ERROR\x10\x06\x12\"\n\x1d\x45RROR_PUBLIC_ERROR_CODE_START\x10\xe8\x07\x12\x10\n\x0b\x45RROR_RDBMS\x10\xe9\x07\x12\x12\n\rERROR_TIMEOUT\x10\xea\x07\x12\x19\n\x14\x45RROR_NOT_AUTHORIZED\x10\xeb\x07\x12\x1d\n\x18\x45RROR_INSTANCE_SUSPENDED\x10\xec\x07\x12\x1c\n\x17\x45RROR_INVALID_PARAMETER\x10\xed\x07\x12\"\n\x1d\x45RROR_NOT_ALL_VARIABLES_BOUND\x10\xee\x07\x12\x1d\n\x18\x45RROR_UNKNOWN_CONNECTION\x10\xef\x07\x12\x1c\n\x17\x45RROR_UNKNOWN_STATEMENT\x10\xf0\x07\x12\x1a\n\x15\x45RROR_UNKNOWN_CATALOG\x10\xf1\x07\x12\x19\n\x14\x45RROR_UNKNOWN_CURSOR\x10\xf2\x07\x12\x1b\n\x16\x45RROR_CURSOR_EXHAUSTED\x10\xfc\x07\x12\x1e\n\x19\x45RROR_NOT_YET_IMPLEMENTED\x10\x86\x08\x12\x1a\n\x15\x45RROR_NOT_IMPLEMENTED\x10\x87\x08\x12\x1f\n\x1a\x45RROR_INSTANCE_MAINTENANCE\x10\x88\x08\x12\'\n\"ERROR_TOO_MANY_CONCURRENT_REQUESTS\x10\x89\x08\x12\"\n\x1d\x45RROR_RESOURCE_DOES_NOT_EXIST\x10\x8a\x08\x12\"\n\x1d\x45RROR_RESOURCE_ALREADY_EXISTS\x10\x8b\x08\x12\x1c\n\x17\x45RROR_CONNECTION_IN_USE\x10\x8c\x08\x12!\n\x1c\x45RROR_CLIENT_VERSION_TOO_OLD\x10\x8d\x08\x12\x1b\n\x16\x45RROR_RESPONSE_PENDING\x10\x8e\x08\x12(\n#ERROR_INSTANCE_SUSPENDED_BY_BILLING\x10\x8f\x08\x42%\n\x1b\x63om.google.protos.cloud.sql\x10\x02 \x02(\x02P\x01')
+
+
+
# Hand-wired EnumDescriptor for ClientErrorCode (machine-generated; do not
# edit).  Values below 1000 are internal/deprecated codes; the
# ERROR_PUBLIC_ERROR_CODE_START sentinel (1000) marks the start of the
# externally-visible error codes.
_SQLSERVICECLIENTERROR_CLIENTERRORCODE = descriptor.EnumDescriptor(
  name='ClientErrorCode',
  full_name='speckle.sql.SqlServiceClientError.ClientErrorCode',
  filename=None,
  file=DESCRIPTOR,
  values=[
    descriptor.EnumValueDescriptor(
      name='OK', index=0, number=0,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='TRANSIENT_ERROR', index=1, number=1,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='INTERNAL_ERROR', index=2, number=2,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='INVALID_REQUEST', index=3, number=3,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='DEPRECATED_TIMEOUT', index=4, number=4,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='DEPRECATED_NOT_AUTHORIZED', index=5, number=5,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='DEPRECATED_RDBMS_ERROR', index=6, number=6,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_PUBLIC_ERROR_CODE_START', index=7, number=1000,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_RDBMS', index=8, number=1001,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_TIMEOUT', index=9, number=1002,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_NOT_AUTHORIZED', index=10, number=1003,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_INSTANCE_SUSPENDED', index=11, number=1004,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_INVALID_PARAMETER', index=12, number=1005,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_NOT_ALL_VARIABLES_BOUND', index=13, number=1006,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_UNKNOWN_CONNECTION', index=14, number=1007,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_UNKNOWN_STATEMENT', index=15, number=1008,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_UNKNOWN_CATALOG', index=16, number=1009,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_UNKNOWN_CURSOR', index=17, number=1010,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_CURSOR_EXHAUSTED', index=18, number=1020,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_NOT_YET_IMPLEMENTED', index=19, number=1030,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_NOT_IMPLEMENTED', index=20, number=1031,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_INSTANCE_MAINTENANCE', index=21, number=1032,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_TOO_MANY_CONCURRENT_REQUESTS', index=22, number=1033,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_RESOURCE_DOES_NOT_EXIST', index=23, number=1034,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_RESOURCE_ALREADY_EXISTS', index=24, number=1035,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_CONNECTION_IN_USE', index=25, number=1036,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_CLIENT_VERSION_TOO_OLD', index=26, number=1037,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_RESPONSE_PENDING', index=27, number=1038,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='ERROR_INSTANCE_SUSPENDED_BY_BILLING', index=28, number=1039,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=89,
  serialized_end=944,
)
+
+
# Hand-wired Descriptor for the (field-less) SqlServiceClientError message,
# which exists only to namespace the ClientErrorCode enum.  Machine-
# generated; do not edit.
_SQLSERVICECLIENTERROR = descriptor.Descriptor(
  name='SqlServiceClientError',
  full_name='speckle.sql.SqlServiceClientError',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _SQLSERVICECLIENTERROR_CLIENTERRORCODE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=63,
  serialized_end=944,
)
+
# Wire the hand-built descriptors together: the enum's containing type is
# the message, and the message is registered on the file DESCRIPTOR.
_SQLSERVICECLIENTERROR_CLIENTERRORCODE.containing_type = _SQLSERVICECLIENTERROR
DESCRIPTOR.message_types_by_name['SqlServiceClientError'] = _SQLSERVICECLIENTERROR
+
# Generated message class; the GeneratedProtocolMessageType metaclass
# fills in all message behavior from DESCRIPTOR at class-creation time.
class SqlServiceClientError(message.Message):
  __metaclass__ = reflection.GeneratedProtocolMessageType
  # Links this class to the hand-wired Descriptor above.
  DESCRIPTOR = _SQLSERVICECLIENTERROR
+
+
+
+
diff --git a/google/storage/speckle/proto/client_pb2.py b/google/storage/speckle/proto/client_pb2.py
index edd30f6..c540fb4 100755
--- a/google/storage/speckle/proto/client_pb2.py
+++ b/google/storage/speckle/proto/client_pb2.py
@@ -28,7 +28,7 @@
DESCRIPTOR = descriptor.FileDescriptor(
name='storage/speckle/proto/client.proto',
package='speckle',
- serialized_pb='\n\"storage/speckle/proto/client.proto\x12\x07speckle\"\xb6\x01\n\x11\x42indVariableProto\x12\r\n\x05value\x18\x01 \x01(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x05\x12\x10\n\x08position\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12;\n\tdirection\x18\x05 \x01(\x0e\x32$.speckle.BindVariableProto.Direction:\x02IN\"\'\n\tDirection\x12\x06\n\x02IN\x10\x01\x12\x07\n\x03OUT\x10\x02\x12\t\n\x05INOUT\x10\x03\"\x8c\x03\n\x0bResultProto\x12\"\n\x04rows\x18\x01 \x01(\x0b\x32\x14.speckle.RowSetProto\x12\x14\n\x0crows_updated\x18\x02 \x01(\x03\x12\x16\n\x0egenerated_keys\x18\x03 \x03(\x0c\x12\'\n\x08warnings\x18\x04 \x03(\x0b\x32\x15.speckle.SqlException\x12,\n\rsql_exception\x18\x05 \x01(\x0b\x32\x15.speckle.SqlException\x12\x14\n\x0cstatement_id\x18\x06 \x01(\x04\x12\x18\n\tmore_rows\x18\x07 \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0cmore_results\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\x33\n\x0foutput_variable\x18\t \x03(\x0b\x32\x1a.speckle.BindVariableProto\x12\x1a\n\x12\x62\x61tch_rows_updated\x18\n \x03(\x03\x12\x36\n\x12parameter_metadata\x18\x0b \x03(\x0b\x32\x1a.speckle.ParameterMetadata\"\xf1\x05\n\x07OpProto\x12%\n\x04type\x18\x01 \x02(\x0e\x32\x17.speckle.OpProto.OpType\x12\x0f\n\x07\x63\x61talog\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\x12%\n\tsavepoint\x18\x04 \x01(\x0b\x32\x12.speckle.SavePoint\x12\x13\n\x0b\x61uto_commit\x18\x05 \x01(\x08\x12\x11\n\tread_only\x18\x06 \x01(\x08\x12G\n\x1btransaction_isolation_level\x18\x07 \x01(\x0e\x32\".speckle.TransactionIsolationLevel\x12\x14\n\x0cstatement_id\x18\x08 \x01(\x04\x12\x12\n\nrequest_id\x18\t 
\x01(\x04\"\xde\x03\n\x06OpType\x12\x0e\n\nNATIVE_SQL\x10\x01\x12\x0c\n\x08ROLLBACK\x10\x02\x12\x11\n\rSET_SAVEPOINT\x10\x03\x12\x13\n\x0fSET_AUTO_COMMIT\x10\x04\x12\x11\n\rSET_READ_ONLY\x10\x05\x12#\n\x1fSET_TRANSACTION_ISOLATION_LEVEL\x10\x06\x12\n\n\x06\x43OMMIT\x10\x07\x12\x0f\n\x0bSET_CATALOG\x10\x08\x12\x13\n\x0f\x43LOSE_STATEMENT\x10\t\x12\x08\n\x04PING\x10\n\x12\x0f\n\x0bNEXT_RESULT\x10\x0b\x12\t\n\x05RETRY\x10\x0c\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE13\x10\r\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE14\x10\x0e\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE15\x10\x0f\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE16\x10\x10\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE17\x10\x11\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE18\x10\x12\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE19\x10\x13\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE20\x10\x14\"%\n\tSavePoint\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x02(\t\"c\n\x0cSqlException\x12\x0f\n\x07message\x18\x01 \x02(\t\x12\x0f\n\x04\x63ode\x18\x02 \x02(\x05:\x01\x30\x12\x11\n\tsql_state\x18\x03 \x01(\t\x12\x1e\n\x16\x61pplication_error_code\x18\x04 \x01(\x05\"+\n\nTupleProto\x12\x0e\n\x06values\x18\x01 \x03(\x0c\x12\r\n\x05nulls\x18\x02 \x03(\x05\"\xc0\x03\n\x0b\x43olumnProto\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\r\n\x05label\x18\x02 \x01(\t\x12\x10\n\x04type\x18\x03 \x01(\x05:\x02\x31\x32\x12\x12\n\ntable_name\x18\x04 \x01(\t\x12\x13\n\x0bschema_name\x18\x05 \x01(\t\x12\x14\n\x0c\x63\x61talog_name\x18\x06 \x01(\t\x12\x14\n\tprecision\x18\x07 \x01(\x05:\x01\x30\x12\x10\n\x05scale\x18\x08 \x01(\x05:\x01\x30\x12\x10\n\x08nullable\x18\t \x01(\x08\x12\x12\n\nsearchable\x18\n \x01(\x08\x12\x14\n\x0c\x64isplay_size\x18\x0b \x01(\x05\x12\x1d\n\x0e\x61uto_increment\x18\x0c \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0e\x63\x61se_sensitive\x18\r \x01(\x08:\x05\x66\x61lse\x12\x17\n\x08\x63urrency\x18\x0e \x01(\x08:\x05\x66\x61lse\x12\"\n\x13\x64\x65\x66initely_writable\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x18\n\tread_only\x18\x10 
\x01(\x08:\x05\x66\x61lse\x12\x15\n\x06signed\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\x17\n\x08writable\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x1a\n\x10\x63olumn_type_name\x18\x13 \x01(\t:\x00\"Y\n\x0bRowSetProto\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.speckle.ColumnProto\x12#\n\x06tuples\x18\x02 \x03(\x0b\x32\x13.speckle.TupleProto\"\xcb\x36\n\x19JdbcDatabaseMetaDataProto\x12*\n\x1b\x61ll_procedures_are_callable\x18\x01 \x01(\x08:\x05\x66\x61lse\x12(\n\x19\x61ll_tables_are_selectable\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x39\n*auto_commit_failure_closes_all_result_sets\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x38\n)data_definition_causes_transaction_commit\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x36\n\'data_definition_ignored_in_transactions\x18\x05 \x01(\x08:\x05\x66\x61lse\x12.\n\x1f\x64oes_max_row_size_include_blobs\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x19\n\x11\x63\x61talog_separator\x18\x07 \x01(\t\x12\x14\n\x0c\x63\x61talog_term\x18\x08 \x01(\t\x12!\n\x16\x64\x61tabase_major_version\x18\t \x01(\x05:\x01\x30\x12!\n\x16\x64\x61tabase_minor_version\x18\n \x01(\x05:\x01\x30\x12&\n\x15\x64\x61tabase_product_name\x18\x0b \x01(\t:\x07Speckle\x12\"\n\x18\x64\x61tabase_product_version\x18\x0c \x01(\t:\x00\x12u\n\x1d\x64\x65\x66\x61ult_transaction_isolation\x18\r \x01(\x0e\x32\".speckle.TransactionIsolationLevel:*TRANSACTIONISOLATIONLEVEL_TRANSACTION_NONE\x12\x1f\n\x15\x65xtra_name_characters\x18\x0e \x01(\t:\x00\x12!\n\x17identifier_quote_string\x18\x0f \x01(\t:\x00\x12\x1d\n\x12jdbc_major_version\x18\x10 \x01(\x05:\x01\x31\x12\x1d\n\x12jdbc_minor_version\x18\x11 \x01(\x05:\x01\x30\x12$\n\x19max_binary_literal_length\x18\x12 \x01(\x05:\x01\x30\x12\"\n\x17max_catalog_name_length\x18\x13 \x01(\x05:\x01\x30\x12\"\n\x17max_char_literal_length\x18\x14 \x01(\x05:\x01\x30\x12!\n\x16max_column_name_length\x18\x15 \x01(\x05:\x01\x30\x12\"\n\x17max_columns_in_group_by\x18\x16 \x01(\x05:\x01\x30\x12\x1f\n\x14max_columns_in_index\x18\x17 
\x01(\x05:\x01\x30\x12\"\n\x17max_columns_in_order_by\x18\x18 \x01(\x05:\x01\x30\x12 \n\x15max_columns_in_select\x18\x19 \x01(\x05:\x01\x30\x12\x1f\n\x14max_columns_in_table\x18\x1a \x01(\x05:\x01\x30\x12\x1a\n\x0fmax_connections\x18\x1b \x01(\x05:\x01\x30\x12!\n\x16max_cursor_name_length\x18\x1c \x01(\x05:\x01\x30\x12\x1b\n\x10max_index_length\x18\x1d \x01(\x05:\x01\x30\x12$\n\x19max_procedure_name_length\x18\x1e \x01(\x05:\x01\x30\x12\x17\n\x0cmax_row_size\x18\x1f \x01(\x05:\x01\x30\x12!\n\x16max_schema_name_length\x18 \x01(\x05:\x01\x30\x12\x1f\n\x14max_statement_length\x18! \x01(\x05:\x01\x30\x12\x19\n\x0emax_statements\x18\" \x01(\x05:\x01\x30\x12 \n\x15max_table_name_length\x18# \x01(\x05:\x01\x30\x12\x1f\n\x14max_tables_in_select\x18$ \x01(\x05:\x01\x30\x12\x1f\n\x14max_user_name_length\x18% \x01(\x05:\x01\x30\x12\x1b\n\x11numeric_functions\x18& \x01(\t:\x00\x12\x18\n\x0eprocedure_term\x18\' \x01(\t:\x00\x12j\n\x15resultset_holdability\x18( \x01(\x0e\x32\x1d.speckle.ResultSetHoldability:,RESULTSETHOLDABILITY_CLOSE_CURSORS_AT_COMMIT\x12i\n\x0erowid_lifetime\x18) \x01(\x0e\x32\x30.speckle.JdbcDatabaseMetaDataProto.RowIdLifetime:\x1fROWIDLIFETIME_ROWID_UNSUPPORTED\x12\x14\n\x0csql_keywords\x18* \x01(\t\x12\x63\n\x0esql_state_type\x18+ \x01(\x0e\x32/.speckle.JdbcDatabaseMetaDataProto.SqlStateType:\x1aSQLSTATETYPE_SQL_STATE_SQL\x12\x15\n\x0bschema_term\x18, \x01(\t:\x00\x12\x1c\n\x14search_string_escape\x18- \x01(\t\x12\x1a\n\x10string_functions\x18. 
\x01(\t:\x00\x12\x1a\n\x10system_functions\x18/ \x01(\t:\x00\x12\x1d\n\x13time_date_functions\x18\x30 \x01(\t:\x00\x12\x13\n\tuser_name\x18\x31 \x01(\t:\x00\x12\x1f\n\x10\x63\x61talog_at_start\x18\x32 \x01(\x08:\x05\x66\x61lse\x12#\n\x14locators_update_copy\x18\x33 \x01(\x08:\x05\x66\x61lse\x12)\n\x1anull_plus_non_null_is_null\x18\x34 \x01(\x08:\x05\x66\x61lse\x12&\n\x17nulls_are_sorted_at_end\x18\x35 \x01(\x08:\x05\x66\x61lse\x12(\n\x19nulls_are_sorted_at_start\x18\x36 \x01(\x08:\x05\x66\x61lse\x12$\n\x15nulls_are_sorted_high\x18\x37 \x01(\x08:\x05\x66\x61lse\x12#\n\x14nulls_are_sorted_low\x18\x38 \x01(\x08:\x05\x66\x61lse\x12,\n\x1dstores_lower_case_identifiers\x18\x39 \x01(\x08:\x05\x66\x61lse\x12\x33\n$stores_lower_case_quoted_identifiers\x18: \x01(\x08:\x05\x66\x61lse\x12,\n\x1dstores_mixed_case_identifiers\x18; \x01(\x08:\x05\x66\x61lse\x12\x33\n$stores_mixed_case_quoted_identifiers\x18< \x01(\x08:\x05\x66\x61lse\x12,\n\x1dstores_upper_case_identifiers\x18= \x01(\x08:\x05\x66\x61lse\x12\x33\n$stores_upper_case_quoted_identifiers\x18> \x01(\x08:\x05\x66\x61lse\x12.\n\x1fsupports_ansi92_entry_level_sql\x18? 
\x01(\x08:\x05\x66\x61lse\x12\'\n\x18supports_ansi92_full_sql\x18@ \x01(\x08:\x05\x66\x61lse\x12/\n supports_ansi92_intermediate_sql\x18\x41 \x01(\x08:\x05\x66\x61lse\x12\x33\n$supports_alter_table_with_add_column\x18\x42 \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_alter_table_with_drop_column\x18\x43 \x01(\x08:\x05\x66\x61lse\x12%\n\x16supports_batch_updates\x18\x44 \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_catalogs_in_data_manipulation\x18\x45 \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_catalogs_in_index_definitions\x18\x46 \x01(\x08:\x05\x66\x61lse\x12\x39\n*supports_catalogs_in_privilege_definitions\x18G \x01(\x08:\x05\x66\x61lse\x12\x33\n$supports_catalogs_in_procedure_calls\x18H \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_catalogs_in_table_definitions\x18I \x01(\x08:\x05\x66\x61lse\x12\'\n\x18supports_column_aliasing\x18J \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10supports_convert\x18K \x01(\x08:\x05\x66\x61lse\x12(\n\x19supports_core_sql_grammar\x18L \x01(\x08:\x05\x66\x61lse\x12-\n\x1esupports_correlated_subqueries\x18M \x01(\x08:\x05\x66\x61lse\x12J\n;supports_data_definition_and_data_manipulation_transactions\x18N \x01(\x08:\x05\x66\x61lse\x12;\n,supports_data_manipulation_transactions_only\x18O \x01(\x08:\x05\x66\x61lse\x12\x39\n*supports_different_table_correlation_names\x18P \x01(\x08:\x05\x66\x61lse\x12/\n supports_expressions_in_order_by\x18Q \x01(\x08:\x05\x66\x61lse\x12,\n\x1dsupports_extended_sql_grammar\x18R \x01(\x08:\x05\x66\x61lse\x12(\n\x19supports_full_outer_joins\x18S \x01(\x08:\x05\x66\x61lse\x12*\n\x1bsupports_get_generated_keys\x18T \x01(\x08:\x05\x66\x61lse\x12 \n\x11supports_group_by\x18U \x01(\x08:\x05\x66\x61lse\x12.\n\x1fsupports_group_by_beyond_select\x18V \x01(\x08:\x05\x66\x61lse\x12*\n\x1bsupports_group_by_unrelated\x18W \x01(\x08:\x05\x66\x61lse\x12\x36\n\'supports_integrity_enhancement_facility\x18X \x01(\x08:\x05\x66\x61lse\x12*\n\x1bsupports_like_escape_clause\x18Y 
\x01(\x08:\x05\x66\x61lse\x12+\n\x1csupports_limited_outer_joins\x18Z \x01(\x08:\x05\x66\x61lse\x12+\n\x1csupports_minimum_sql_grammar\x18[ \x01(\x08:\x05\x66\x61lse\x12.\n\x1fsupports_mixed_case_identifiers\x18\\ \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_mixed_case_quoted_identifiers\x18] \x01(\x08:\x05\x66\x61lse\x12-\n\x1esupports_multiple_open_results\x18^ \x01(\x08:\x05\x66\x61lse\x12,\n\x1dsupports_multiple_result_sets\x18_ \x01(\x08:\x05\x66\x61lse\x12-\n\x1esupports_multiple_transactions\x18` \x01(\x08:\x05\x66\x61lse\x12(\n\x19supports_named_parameters\x18\x61 \x01(\x08:\x05\x66\x61lse\x12,\n\x1dsupports_non_nullable_columns\x18\x62 \x01(\x08:\x05\x66\x61lse\x12\x32\n#supports_open_cursors_across_commit\x18\x63 \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_open_cursors_across_rollback\x18\x64 \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_open_statements_across_commit\x18\x65 \x01(\x08:\x05\x66\x61lse\x12\x37\n(supports_open_statements_across_rollback\x18\x66 \x01(\x08:\x05\x66\x61lse\x12*\n\x1bsupports_order_by_unrelated\x18g \x01(\x08:\x05\x66\x61lse\x12#\n\x14supports_outer_joins\x18h \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_positioned_delete\x18i \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_positioned_update\x18j \x01(\x08:\x05\x66\x61lse\x12\"\n\x13supports_savepoints\x18k \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_schemas_in_data_manipulation\x18l \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_schemas_in_index_definitions\x18m \x01(\x08:\x05\x66\x61lse\x12\x38\n)supports_schemas_in_privilege_definitions\x18n \x01(\x08:\x05\x66\x61lse\x12\x32\n#supports_schemas_in_procedure_calls\x18o \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_schemas_in_table_definitions\x18p \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_select_for_update\x18q \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_statement_pooling\x18r \x01(\x08:\x05\x66\x61lse\x12:\n+supports_stored_functions_using_call_syntax\x18s \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_stored_procedures\x18t 
\x01(\x08:\x05\x66\x61lse\x12\x31\n\"supports_subqueries_in_comparisons\x18u \x01(\x08:\x05\x66\x61lse\x12,\n\x1dsupports_subqueries_in_exists\x18v \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_subqueries_in_ins\x18w \x01(\x08:\x05\x66\x61lse\x12\x31\n\"supports_subqueries_in_quantifieds\x18x \x01(\x08:\x05\x66\x61lse\x12/\n supports_table_correlation_names\x18y \x01(\x08:\x05\x66\x61lse\x12$\n\x15supports_transactions\x18z \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0esupports_union\x18{ \x01(\x08:\x05\x66\x61lse\x12!\n\x12supports_union_all\x18| \x01(\x08:\x05\x66\x61lse\x12(\n\x19uses_local_file_per_table\x18} \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10uses_local_files\x18~ \x01(\x08:\x05\x66\x61lse\x12\x18\n\tread_only\x18\x7f \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0btable_types\x18\x80\x01 \x03(\t\x12\x11\n\x08\x63\x61talogs\x18\x81\x01 \x03(\t\x12;\n\x07schemas\x18\x82\x01 \x03(\x0b\x32).speckle.JdbcDatabaseMetaDataProto.Schema\x12\x35\n\x14\x64\x65letes_are_detected\x18\x83\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x35\n\x14inserts_are_detected\x18\x84\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x35\n\x14updates_are_detected\x18\x85\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12;\n\x1aothers_deletes_are_visible\x18\x86\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12;\n\x1aothers_inserts_are_visible\x18\x87\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12;\n\x1aothers_updates_are_visible\x18\x88\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x38\n\x17own_deletes_are_visible\x18\x89\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x38\n\x17own_inserts_are_visible\x18\x8a\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x38\n\x17own_updates_are_visible\x18\x8b\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12J\n)supports_result_set_concurrency_updatable\x18\x8c\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x39\n\x18supports_result_set_type\x18\x8d\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12G\n\x1fsupports_result_set_holdability\x18\x8e\x01 
\x03(\x0e\x32\x1d.speckle.ResultSetHoldability\x12Q\n$supports_transaction_isolation_level\x18\x8f\x01 \x03(\x0e\x32\".speckle.TransactionIsolationLevel\x12-\n\x1dgenerated_key_always_returned\x18\x90\x01 \x01(\x08:\x05\x66\x61lse\x1a\x35\n\x06Schema\x12\x14\n\x0ctable_schema\x18\x01 \x01(\t\x12\x15\n\rtable_catalog\x18\x02 \x01(\t\"\xd2\x01\n\rRowIdLifetime\x12#\n\x1fROWIDLIFETIME_ROWID_UNSUPPORTED\x10\x00\x12%\n!ROWIDLIFETIME_ROWID_VALID_FOREVER\x10\x01\x12#\n\x1fROWIDLIFETIME_ROWID_VALID_OTHER\x10\x02\x12%\n!ROWIDLIFETIME_ROWID_VALID_SESSION\x10\x03\x12)\n%ROWIDLIFETIME_ROWID_VALID_TRANSACTION\x10\x04\"r\n\x0cSqlStateType\x12\x1e\n\x1aSQLSTATETYPE_SQL_STATE_SQL\x10\x00\x12 \n\x1cSQLSTATETYPE_SQL_STATE_SQL99\x10\x01\x12 \n\x1cSQLSTATETYPE_SQL_STATE_XOPEN\x10\x02\"&\n\x08Property\x12\x0b\n\x03key\x18\x01 \x02(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\xd6\x03\n\x0b\x45xecOptions\x12%\n\x16include_generated_keys\x18\x01 \x01(\x08:\x05\x66\x61lse\x12 \n\x18generated_column_indices\x18\x02 \x03(\x05\x12\x1e\n\x16generated_column_names\x18\x03 \x03(\t\x12$\n\x04type\x18\x04 \x01(\x0e\x32\x16.speckle.ResultSetType\x12\x32\n\x0b\x63oncurrency\x18\x05 \x01(\x0e\x32\x1d.speckle.ResultSetConcurrency\x12\x32\n\x0bholdability\x18\x06 \x01(\x0e\x32\x1d.speckle.ResultSetHoldability\x12\x12\n\nfetch_size\x18\x07 \x01(\x05\x12\x10\n\x08max_rows\x18\x08 \x01(\x05\x12\x17\n\x08poolable\x18\t \x01(\x08:\x05\x66\x61lse\x12?\n\x0f\x66\x65tch_direction\x18\n \x01(\x0e\x32\x17.speckle.FetchDirection:\rFETCH_FORWARD\x12\x13\n\x0b\x63ursor_name\x18\x0b \x01(\t\x12\x19\n\x0emax_field_size\x18\x0c \x01(\x05:\x01\x30\x12 \n\x11\x65scape_processing\x18\r \x01(\x08:\x05\x66\x61lse\"K\n\x16\x42\x61tchBindVariableProto\x12\x31\n\rbind_variable\x18\x01 \x03(\x0b\x32\x1a.speckle.BindVariableProto\"]\n\nBatchProto\x12\x11\n\tstatement\x18\x01 \x03(\t\x12<\n\x13\x62\x61tch_bind_variable\x18\x02 \x03(\x0b\x32\x1f.speckle.BatchBindVariableProto\"!\n\x11ParameterMetadata\x12\x0c\n\x04name\x18\x01 
\x01(\t\":\n\rRpcErrorProto\x12\x12\n\nerror_code\x18\x01 \x01(\x05\x12\x15\n\rerror_message\x18\x02 \x01(\t*\xb4\x02\n\x19TransactionIsolationLevel\x12.\n*TRANSACTIONISOLATIONLEVEL_TRANSACTION_NONE\x10\x00\x12\x38\n4TRANSACTIONISOLATIONLEVEL_TRANSACTION_READ_COMMITTED\x10\x02\x12:\n6TRANSACTIONISOLATIONLEVEL_TRANSACTION_READ_UNCOMMITTED\x10\x01\x12\x39\n5TRANSACTIONISOLATIONLEVEL_TRANSACTION_REPEATABLE_READ\x10\x04\x12\x36\n2TRANSACTIONISOLATIONLEVEL_TRANSACTION_SERIALIZABLE\x10\x08*\x8b\x01\n\rResultSetType\x12$\n\x1fRESULTSETTYPE_TYPE_FORWARD_ONLY\x10\xeb\x07\x12*\n%RESULTSETTYPE_TYPE_SCROLL_INSENSITIVE\x10\xec\x07\x12(\n#RESULTSETTYPE_TYPE_SCROLL_SENSITIVE\x10\xed\x07*n\n\x14ResultSetConcurrency\x12*\n%RESULTSETCONCURRENCY_CONCUR_READ_ONLY\x10\xef\x07\x12*\n%RESULTSETCONCURRENCY_CONCUR_UPDATABLE\x10\xf0\x07*{\n\x14ResultSetHoldability\x12\x31\n-RESULTSETHOLDABILITY_HOLD_CURSORS_OVER_COMMIT\x10\x01\x12\x30\n,RESULTSETHOLDABILITY_CLOSE_CURSORS_AT_COMMIT\x10\x02*L\n\x0e\x46\x65tchDirection\x12\x12\n\rFETCH_FORWARD\x10\xe8\x07\x12\x12\n\rFETCH_REVERSE\x10\xe9\x07\x12\x12\n\rFETCH_UNKNOWN\x10\xea\x07*\xc4\t\n\x0cMetadataType\x12(\n$METADATATYPE_DATABASE_METADATA_BASIC\x10\x01\x12-\n)METADATATYPE_DATABASE_METADATA_GET_TABLES\x10\x02\x12\x31\n-METADATATYPE_DATABASE_METADATA_GET_PROCEDURES\x10\x03\x12\x38\n4METADATATYPE_DATABASE_METADATA_GET_PROCEDURE_COLUMNS\x10\x04\x12.\n*METADATATYPE_DATABASE_METADATA_GET_COLUMNS\x10\x05\x12\x38\n4METADATATYPE_DATABASE_METADATA_GET_COLUMN_PRIVILEGES\x10\x06\x12\x37\n3METADATATYPE_DATABASE_METADATA_GET_TABLE_PRIVILEGES\x10\x07\x12:\n6METADATATYPE_DATABASE_METADATA_GET_BEST_ROW_IDENTIFIER\x10\x08\x12\x36\n2METADATATYPE_DATABASE_METADATA_GET_VERSION_COLUMNS\x10\t\x12\x33\n/METADATATYPE_DATABASE_METADATA_GET_PRIMARY_KEYS\x10\n\x12\x34\n0METADATATYPE_DATABASE_METADATA_GET_IMPORTED_KEYS\x10\x0b\x12\x34\n0METADATATYPE_DATABASE_METADATA_GET_EXPORTED_KEYS\x10\x0c\x12\x36\n2METADATATYPE_DATABASE_METADATA_GET_CROSS_REFERENCE\x10\r\x12\x31\n-MET
ADATATYPE_DATABASE_METADATA_GET_INDEX_INFO\x10\x0e\x12+\n\'METADATATYPE_DATABASE_METADATA_GET_UDTS\x10\x0f\x12\x32\n.METADATATYPE_DATABASE_METADATA_GET_SUPER_TYPES\x10\x10\x12\x33\n/METADATATYPE_DATABASE_METADATA_GET_SUPER_TABLES\x10\x11\x12\x31\n-METADATATYPE_DATABASE_METADATA_GET_ATTRIBUTES\x10\x12\x12\x30\n,METADATATYPE_DATABASE_METADATA_GET_FUNCTIONS\x10\x13\x12\x37\n3METADATATYPE_DATABASE_METADATA_GET_FUNCTION_COLUMNS\x10\x14\x12\x30\n,METADATATYPE_DATABASE_METADATA_GET_TYPE_INFO\x10\x15\x12.\n*METADATATYPE_DATABASE_METADATA_GET_SCHEMAS\x10\x16\x12\x35\n1METADATATYPE_DATABASE_METADATA_GET_PSEUDO_COLUMNS\x10\x17\x42%\n\x1b\x63om.google.protos.cloud.sql\x10\x02 \x02(\x02xd')
+ serialized_pb='\n\"storage/speckle/proto/client.proto\x12\x07speckle\"\xb6\x01\n\x11\x42indVariableProto\x12\r\n\x05value\x18\x01 \x01(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x05\x12\x10\n\x08position\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12;\n\tdirection\x18\x05 \x01(\x0e\x32$.speckle.BindVariableProto.Direction:\x02IN\"\'\n\tDirection\x12\x06\n\x02IN\x10\x01\x12\x07\n\x03OUT\x10\x02\x12\t\n\x05INOUT\x10\x03\"\x8c\x03\n\x0bResultProto\x12\"\n\x04rows\x18\x01 \x01(\x0b\x32\x14.speckle.RowSetProto\x12\x14\n\x0crows_updated\x18\x02 \x01(\x03\x12\x16\n\x0egenerated_keys\x18\x03 \x03(\x0c\x12\'\n\x08warnings\x18\x04 \x03(\x0b\x32\x15.speckle.SqlException\x12,\n\rsql_exception\x18\x05 \x01(\x0b\x32\x15.speckle.SqlException\x12\x14\n\x0cstatement_id\x18\x06 \x01(\x04\x12\x18\n\tmore_rows\x18\x07 \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0cmore_results\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\x33\n\x0foutput_variable\x18\t \x03(\x0b\x32\x1a.speckle.BindVariableProto\x12\x1a\n\x12\x62\x61tch_rows_updated\x18\n \x03(\x03\x12\x36\n\x12parameter_metadata\x18\x0b \x03(\x0b\x32\x1a.speckle.ParameterMetadata\"\xf1\x05\n\x07OpProto\x12%\n\x04type\x18\x01 \x02(\x0e\x32\x17.speckle.OpProto.OpType\x12\x0f\n\x07\x63\x61talog\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\x12%\n\tsavepoint\x18\x04 \x01(\x0b\x32\x12.speckle.SavePoint\x12\x13\n\x0b\x61uto_commit\x18\x05 \x01(\x08\x12\x11\n\tread_only\x18\x06 \x01(\x08\x12G\n\x1btransaction_isolation_level\x18\x07 \x01(\x0e\x32\".speckle.TransactionIsolationLevel\x12\x14\n\x0cstatement_id\x18\x08 \x01(\x04\x12\x12\n\nrequest_id\x18\t 
\x01(\x04\"\xde\x03\n\x06OpType\x12\x0e\n\nNATIVE_SQL\x10\x01\x12\x0c\n\x08ROLLBACK\x10\x02\x12\x11\n\rSET_SAVEPOINT\x10\x03\x12\x13\n\x0fSET_AUTO_COMMIT\x10\x04\x12\x11\n\rSET_READ_ONLY\x10\x05\x12#\n\x1fSET_TRANSACTION_ISOLATION_LEVEL\x10\x06\x12\n\n\x06\x43OMMIT\x10\x07\x12\x0f\n\x0bSET_CATALOG\x10\x08\x12\x13\n\x0f\x43LOSE_STATEMENT\x10\t\x12\x08\n\x04PING\x10\n\x12\x0f\n\x0bNEXT_RESULT\x10\x0b\x12\t\n\x05RETRY\x10\x0c\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE13\x10\r\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE14\x10\x0e\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE15\x10\x0f\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE16\x10\x10\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE17\x10\x11\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE18\x10\x12\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE19\x10\x13\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE20\x10\x14\"%\n\tSavePoint\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x02(\t\"c\n\x0cSqlException\x12\x0f\n\x07message\x18\x01 \x02(\t\x12\x0f\n\x04\x63ode\x18\x02 \x02(\x05:\x01\x30\x12\x11\n\tsql_state\x18\x03 \x01(\t\x12\x1e\n\x16\x61pplication_error_code\x18\x04 \x01(\x05\"+\n\nTupleProto\x12\x0e\n\x06values\x18\x01 \x03(\x0c\x12\r\n\x05nulls\x18\x02 \x03(\x05\"\xc0\x03\n\x0b\x43olumnProto\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\r\n\x05label\x18\x02 \x01(\t\x12\x10\n\x04type\x18\x03 \x01(\x05:\x02\x31\x32\x12\x12\n\ntable_name\x18\x04 \x01(\t\x12\x13\n\x0bschema_name\x18\x05 \x01(\t\x12\x14\n\x0c\x63\x61talog_name\x18\x06 \x01(\t\x12\x14\n\tprecision\x18\x07 \x01(\x05:\x01\x30\x12\x10\n\x05scale\x18\x08 \x01(\x05:\x01\x30\x12\x10\n\x08nullable\x18\t \x01(\x08\x12\x12\n\nsearchable\x18\n \x01(\x08\x12\x14\n\x0c\x64isplay_size\x18\x0b \x01(\x05\x12\x1d\n\x0e\x61uto_increment\x18\x0c \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0e\x63\x61se_sensitive\x18\r \x01(\x08:\x05\x66\x61lse\x12\x17\n\x08\x63urrency\x18\x0e \x01(\x08:\x05\x66\x61lse\x12\"\n\x13\x64\x65\x66initely_writable\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x18\n\tread_only\x18\x10 
\x01(\x08:\x05\x66\x61lse\x12\x15\n\x06signed\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\x17\n\x08writable\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x1a\n\x10\x63olumn_type_name\x18\x13 \x01(\t:\x00\"Y\n\x0bRowSetProto\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.speckle.ColumnProto\x12#\n\x06tuples\x18\x02 \x03(\x0b\x32\x13.speckle.TupleProto\"\xcb\x36\n\x19JdbcDatabaseMetaDataProto\x12*\n\x1b\x61ll_procedures_are_callable\x18\x01 \x01(\x08:\x05\x66\x61lse\x12(\n\x19\x61ll_tables_are_selectable\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x39\n*auto_commit_failure_closes_all_result_sets\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x38\n)data_definition_causes_transaction_commit\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x36\n\'data_definition_ignored_in_transactions\x18\x05 \x01(\x08:\x05\x66\x61lse\x12.\n\x1f\x64oes_max_row_size_include_blobs\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x19\n\x11\x63\x61talog_separator\x18\x07 \x01(\t\x12\x14\n\x0c\x63\x61talog_term\x18\x08 \x01(\t\x12!\n\x16\x64\x61tabase_major_version\x18\t \x01(\x05:\x01\x30\x12!\n\x16\x64\x61tabase_minor_version\x18\n \x01(\x05:\x01\x30\x12&\n\x15\x64\x61tabase_product_name\x18\x0b \x01(\t:\x07Speckle\x12\"\n\x18\x64\x61tabase_product_version\x18\x0c \x01(\t:\x00\x12u\n\x1d\x64\x65\x66\x61ult_transaction_isolation\x18\r \x01(\x0e\x32\".speckle.TransactionIsolationLevel:*TRANSACTIONISOLATIONLEVEL_TRANSACTION_NONE\x12\x1f\n\x15\x65xtra_name_characters\x18\x0e \x01(\t:\x00\x12!\n\x17identifier_quote_string\x18\x0f \x01(\t:\x00\x12\x1d\n\x12jdbc_major_version\x18\x10 \x01(\x05:\x01\x31\x12\x1d\n\x12jdbc_minor_version\x18\x11 \x01(\x05:\x01\x30\x12$\n\x19max_binary_literal_length\x18\x12 \x01(\x05:\x01\x30\x12\"\n\x17max_catalog_name_length\x18\x13 \x01(\x05:\x01\x30\x12\"\n\x17max_char_literal_length\x18\x14 \x01(\x05:\x01\x30\x12!\n\x16max_column_name_length\x18\x15 \x01(\x05:\x01\x30\x12\"\n\x17max_columns_in_group_by\x18\x16 \x01(\x05:\x01\x30\x12\x1f\n\x14max_columns_in_index\x18\x17 
\x01(\x05:\x01\x30\x12\"\n\x17max_columns_in_order_by\x18\x18 \x01(\x05:\x01\x30\x12 \n\x15max_columns_in_select\x18\x19 \x01(\x05:\x01\x30\x12\x1f\n\x14max_columns_in_table\x18\x1a \x01(\x05:\x01\x30\x12\x1a\n\x0fmax_connections\x18\x1b \x01(\x05:\x01\x30\x12!\n\x16max_cursor_name_length\x18\x1c \x01(\x05:\x01\x30\x12\x1b\n\x10max_index_length\x18\x1d \x01(\x05:\x01\x30\x12$\n\x19max_procedure_name_length\x18\x1e \x01(\x05:\x01\x30\x12\x17\n\x0cmax_row_size\x18\x1f \x01(\x05:\x01\x30\x12!\n\x16max_schema_name_length\x18 \x01(\x05:\x01\x30\x12\x1f\n\x14max_statement_length\x18! \x01(\x05:\x01\x30\x12\x19\n\x0emax_statements\x18\" \x01(\x05:\x01\x30\x12 \n\x15max_table_name_length\x18# \x01(\x05:\x01\x30\x12\x1f\n\x14max_tables_in_select\x18$ \x01(\x05:\x01\x30\x12\x1f\n\x14max_user_name_length\x18% \x01(\x05:\x01\x30\x12\x1b\n\x11numeric_functions\x18& \x01(\t:\x00\x12\x18\n\x0eprocedure_term\x18\' \x01(\t:\x00\x12j\n\x15resultset_holdability\x18( \x01(\x0e\x32\x1d.speckle.ResultSetHoldability:,RESULTSETHOLDABILITY_CLOSE_CURSORS_AT_COMMIT\x12i\n\x0erowid_lifetime\x18) \x01(\x0e\x32\x30.speckle.JdbcDatabaseMetaDataProto.RowIdLifetime:\x1fROWIDLIFETIME_ROWID_UNSUPPORTED\x12\x14\n\x0csql_keywords\x18* \x01(\t\x12\x63\n\x0esql_state_type\x18+ \x01(\x0e\x32/.speckle.JdbcDatabaseMetaDataProto.SqlStateType:\x1aSQLSTATETYPE_SQL_STATE_SQL\x12\x15\n\x0bschema_term\x18, \x01(\t:\x00\x12\x1c\n\x14search_string_escape\x18- \x01(\t\x12\x1a\n\x10string_functions\x18. 
\x01(\t:\x00\x12\x1a\n\x10system_functions\x18/ \x01(\t:\x00\x12\x1d\n\x13time_date_functions\x18\x30 \x01(\t:\x00\x12\x13\n\tuser_name\x18\x31 \x01(\t:\x00\x12\x1f\n\x10\x63\x61talog_at_start\x18\x32 \x01(\x08:\x05\x66\x61lse\x12#\n\x14locators_update_copy\x18\x33 \x01(\x08:\x05\x66\x61lse\x12)\n\x1anull_plus_non_null_is_null\x18\x34 \x01(\x08:\x05\x66\x61lse\x12&\n\x17nulls_are_sorted_at_end\x18\x35 \x01(\x08:\x05\x66\x61lse\x12(\n\x19nulls_are_sorted_at_start\x18\x36 \x01(\x08:\x05\x66\x61lse\x12$\n\x15nulls_are_sorted_high\x18\x37 \x01(\x08:\x05\x66\x61lse\x12#\n\x14nulls_are_sorted_low\x18\x38 \x01(\x08:\x05\x66\x61lse\x12,\n\x1dstores_lower_case_identifiers\x18\x39 \x01(\x08:\x05\x66\x61lse\x12\x33\n$stores_lower_case_quoted_identifiers\x18: \x01(\x08:\x05\x66\x61lse\x12,\n\x1dstores_mixed_case_identifiers\x18; \x01(\x08:\x05\x66\x61lse\x12\x33\n$stores_mixed_case_quoted_identifiers\x18< \x01(\x08:\x05\x66\x61lse\x12,\n\x1dstores_upper_case_identifiers\x18= \x01(\x08:\x05\x66\x61lse\x12\x33\n$stores_upper_case_quoted_identifiers\x18> \x01(\x08:\x05\x66\x61lse\x12.\n\x1fsupports_ansi92_entry_level_sql\x18? 
\x01(\x08:\x05\x66\x61lse\x12\'\n\x18supports_ansi92_full_sql\x18@ \x01(\x08:\x05\x66\x61lse\x12/\n supports_ansi92_intermediate_sql\x18\x41 \x01(\x08:\x05\x66\x61lse\x12\x33\n$supports_alter_table_with_add_column\x18\x42 \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_alter_table_with_drop_column\x18\x43 \x01(\x08:\x05\x66\x61lse\x12%\n\x16supports_batch_updates\x18\x44 \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_catalogs_in_data_manipulation\x18\x45 \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_catalogs_in_index_definitions\x18\x46 \x01(\x08:\x05\x66\x61lse\x12\x39\n*supports_catalogs_in_privilege_definitions\x18G \x01(\x08:\x05\x66\x61lse\x12\x33\n$supports_catalogs_in_procedure_calls\x18H \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_catalogs_in_table_definitions\x18I \x01(\x08:\x05\x66\x61lse\x12\'\n\x18supports_column_aliasing\x18J \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10supports_convert\x18K \x01(\x08:\x05\x66\x61lse\x12(\n\x19supports_core_sql_grammar\x18L \x01(\x08:\x05\x66\x61lse\x12-\n\x1esupports_correlated_subqueries\x18M \x01(\x08:\x05\x66\x61lse\x12J\n;supports_data_definition_and_data_manipulation_transactions\x18N \x01(\x08:\x05\x66\x61lse\x12;\n,supports_data_manipulation_transactions_only\x18O \x01(\x08:\x05\x66\x61lse\x12\x39\n*supports_different_table_correlation_names\x18P \x01(\x08:\x05\x66\x61lse\x12/\n supports_expressions_in_order_by\x18Q \x01(\x08:\x05\x66\x61lse\x12,\n\x1dsupports_extended_sql_grammar\x18R \x01(\x08:\x05\x66\x61lse\x12(\n\x19supports_full_outer_joins\x18S \x01(\x08:\x05\x66\x61lse\x12*\n\x1bsupports_get_generated_keys\x18T \x01(\x08:\x05\x66\x61lse\x12 \n\x11supports_group_by\x18U \x01(\x08:\x05\x66\x61lse\x12.\n\x1fsupports_group_by_beyond_select\x18V \x01(\x08:\x05\x66\x61lse\x12*\n\x1bsupports_group_by_unrelated\x18W \x01(\x08:\x05\x66\x61lse\x12\x36\n\'supports_integrity_enhancement_facility\x18X \x01(\x08:\x05\x66\x61lse\x12*\n\x1bsupports_like_escape_clause\x18Y 
\x01(\x08:\x05\x66\x61lse\x12+\n\x1csupports_limited_outer_joins\x18Z \x01(\x08:\x05\x66\x61lse\x12+\n\x1csupports_minimum_sql_grammar\x18[ \x01(\x08:\x05\x66\x61lse\x12.\n\x1fsupports_mixed_case_identifiers\x18\\ \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_mixed_case_quoted_identifiers\x18] \x01(\x08:\x05\x66\x61lse\x12-\n\x1esupports_multiple_open_results\x18^ \x01(\x08:\x05\x66\x61lse\x12,\n\x1dsupports_multiple_result_sets\x18_ \x01(\x08:\x05\x66\x61lse\x12-\n\x1esupports_multiple_transactions\x18` \x01(\x08:\x05\x66\x61lse\x12(\n\x19supports_named_parameters\x18\x61 \x01(\x08:\x05\x66\x61lse\x12,\n\x1dsupports_non_nullable_columns\x18\x62 \x01(\x08:\x05\x66\x61lse\x12\x32\n#supports_open_cursors_across_commit\x18\x63 \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_open_cursors_across_rollback\x18\x64 \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_open_statements_across_commit\x18\x65 \x01(\x08:\x05\x66\x61lse\x12\x37\n(supports_open_statements_across_rollback\x18\x66 \x01(\x08:\x05\x66\x61lse\x12*\n\x1bsupports_order_by_unrelated\x18g \x01(\x08:\x05\x66\x61lse\x12#\n\x14supports_outer_joins\x18h \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_positioned_delete\x18i \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_positioned_update\x18j \x01(\x08:\x05\x66\x61lse\x12\"\n\x13supports_savepoints\x18k \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_schemas_in_data_manipulation\x18l \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_schemas_in_index_definitions\x18m \x01(\x08:\x05\x66\x61lse\x12\x38\n)supports_schemas_in_privilege_definitions\x18n \x01(\x08:\x05\x66\x61lse\x12\x32\n#supports_schemas_in_procedure_calls\x18o \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_schemas_in_table_definitions\x18p \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_select_for_update\x18q \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_statement_pooling\x18r \x01(\x08:\x05\x66\x61lse\x12:\n+supports_stored_functions_using_call_syntax\x18s \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_stored_procedures\x18t 
\x01(\x08:\x05\x66\x61lse\x12\x31\n\"supports_subqueries_in_comparisons\x18u \x01(\x08:\x05\x66\x61lse\x12,\n\x1dsupports_subqueries_in_exists\x18v \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_subqueries_in_ins\x18w \x01(\x08:\x05\x66\x61lse\x12\x31\n\"supports_subqueries_in_quantifieds\x18x \x01(\x08:\x05\x66\x61lse\x12/\n supports_table_correlation_names\x18y \x01(\x08:\x05\x66\x61lse\x12$\n\x15supports_transactions\x18z \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0esupports_union\x18{ \x01(\x08:\x05\x66\x61lse\x12!\n\x12supports_union_all\x18| \x01(\x08:\x05\x66\x61lse\x12(\n\x19uses_local_file_per_table\x18} \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10uses_local_files\x18~ \x01(\x08:\x05\x66\x61lse\x12\x18\n\tread_only\x18\x7f \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0btable_types\x18\x80\x01 \x03(\t\x12\x11\n\x08\x63\x61talogs\x18\x81\x01 \x03(\t\x12;\n\x07schemas\x18\x82\x01 \x03(\x0b\x32).speckle.JdbcDatabaseMetaDataProto.Schema\x12\x35\n\x14\x64\x65letes_are_detected\x18\x83\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x35\n\x14inserts_are_detected\x18\x84\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x35\n\x14updates_are_detected\x18\x85\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12;\n\x1aothers_deletes_are_visible\x18\x86\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12;\n\x1aothers_inserts_are_visible\x18\x87\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12;\n\x1aothers_updates_are_visible\x18\x88\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x38\n\x17own_deletes_are_visible\x18\x89\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x38\n\x17own_inserts_are_visible\x18\x8a\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x38\n\x17own_updates_are_visible\x18\x8b\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12J\n)supports_result_set_concurrency_updatable\x18\x8c\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x39\n\x18supports_result_set_type\x18\x8d\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12G\n\x1fsupports_result_set_holdability\x18\x8e\x01 
\x03(\x0e\x32\x1d.speckle.ResultSetHoldability\x12Q\n$supports_transaction_isolation_level\x18\x8f\x01 \x03(\x0e\x32\".speckle.TransactionIsolationLevel\x12-\n\x1dgenerated_key_always_returned\x18\x90\x01 \x01(\x08:\x05\x66\x61lse\x1a\x35\n\x06Schema\x12\x14\n\x0ctable_schema\x18\x01 \x01(\t\x12\x15\n\rtable_catalog\x18\x02 \x01(\t\"\xd2\x01\n\rRowIdLifetime\x12#\n\x1fROWIDLIFETIME_ROWID_UNSUPPORTED\x10\x00\x12%\n!ROWIDLIFETIME_ROWID_VALID_FOREVER\x10\x01\x12#\n\x1fROWIDLIFETIME_ROWID_VALID_OTHER\x10\x02\x12%\n!ROWIDLIFETIME_ROWID_VALID_SESSION\x10\x03\x12)\n%ROWIDLIFETIME_ROWID_VALID_TRANSACTION\x10\x04\"r\n\x0cSqlStateType\x12\x1e\n\x1aSQLSTATETYPE_SQL_STATE_SQL\x10\x00\x12 \n\x1cSQLSTATETYPE_SQL_STATE_SQL99\x10\x01\x12 \n\x1cSQLSTATETYPE_SQL_STATE_XOPEN\x10\x02\"&\n\x08Property\x12\x0b\n\x03key\x18\x01 \x02(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\xd6\x03\n\x0b\x45xecOptions\x12%\n\x16include_generated_keys\x18\x01 \x01(\x08:\x05\x66\x61lse\x12 \n\x18generated_column_indices\x18\x02 \x03(\x05\x12\x1e\n\x16generated_column_names\x18\x03 \x03(\t\x12$\n\x04type\x18\x04 \x01(\x0e\x32\x16.speckle.ResultSetType\x12\x32\n\x0b\x63oncurrency\x18\x05 \x01(\x0e\x32\x1d.speckle.ResultSetConcurrency\x12\x32\n\x0bholdability\x18\x06 \x01(\x0e\x32\x1d.speckle.ResultSetHoldability\x12\x12\n\nfetch_size\x18\x07 \x01(\x05\x12\x10\n\x08max_rows\x18\x08 \x01(\x05\x12\x17\n\x08poolable\x18\t \x01(\x08:\x05\x66\x61lse\x12?\n\x0f\x66\x65tch_direction\x18\n \x01(\x0e\x32\x17.speckle.FetchDirection:\rFETCH_FORWARD\x12\x13\n\x0b\x63ursor_name\x18\x0b \x01(\t\x12\x19\n\x0emax_field_size\x18\x0c \x01(\x05:\x01\x30\x12 \n\x11\x65scape_processing\x18\r \x01(\x08:\x05\x66\x61lse\"K\n\x16\x42\x61tchBindVariableProto\x12\x31\n\rbind_variable\x18\x01 \x03(\x0b\x32\x1a.speckle.BindVariableProto\"]\n\nBatchProto\x12\x11\n\tstatement\x18\x01 \x03(\t\x12<\n\x13\x62\x61tch_bind_variable\x18\x02 \x03(\x0b\x32\x1f.speckle.BatchBindVariableProto\"!\n\x11ParameterMetadata\x12\x0c\n\x04name\x18\x01 
\x01(\t\":\n\rRpcErrorProto\x12\x12\n\nerror_code\x18\x01 \x01(\x05\x12\x15\n\rerror_message\x18\x02 \x01(\t*\xb4\x02\n\x19TransactionIsolationLevel\x12.\n*TRANSACTIONISOLATIONLEVEL_TRANSACTION_NONE\x10\x00\x12\x38\n4TRANSACTIONISOLATIONLEVEL_TRANSACTION_READ_COMMITTED\x10\x02\x12:\n6TRANSACTIONISOLATIONLEVEL_TRANSACTION_READ_UNCOMMITTED\x10\x01\x12\x39\n5TRANSACTIONISOLATIONLEVEL_TRANSACTION_REPEATABLE_READ\x10\x04\x12\x36\n2TRANSACTIONISOLATIONLEVEL_TRANSACTION_SERIALIZABLE\x10\x08*\x8b\x01\n\rResultSetType\x12$\n\x1fRESULTSETTYPE_TYPE_FORWARD_ONLY\x10\xeb\x07\x12*\n%RESULTSETTYPE_TYPE_SCROLL_INSENSITIVE\x10\xec\x07\x12(\n#RESULTSETTYPE_TYPE_SCROLL_SENSITIVE\x10\xed\x07*n\n\x14ResultSetConcurrency\x12*\n%RESULTSETCONCURRENCY_CONCUR_READ_ONLY\x10\xef\x07\x12*\n%RESULTSETCONCURRENCY_CONCUR_UPDATABLE\x10\xf0\x07*{\n\x14ResultSetHoldability\x12\x31\n-RESULTSETHOLDABILITY_HOLD_CURSORS_OVER_COMMIT\x10\x01\x12\x30\n,RESULTSETHOLDABILITY_CLOSE_CURSORS_AT_COMMIT\x10\x02*L\n\x0e\x46\x65tchDirection\x12\x12\n\rFETCH_FORWARD\x10\xe8\x07\x12\x12\n\rFETCH_REVERSE\x10\xe9\x07\x12\x12\n\rFETCH_UNKNOWN\x10\xea\x07*\xc4\t\n\x0cMetadataType\x12(\n$METADATATYPE_DATABASE_METADATA_BASIC\x10\x01\x12-\n)METADATATYPE_DATABASE_METADATA_GET_TABLES\x10\x02\x12\x31\n-METADATATYPE_DATABASE_METADATA_GET_PROCEDURES\x10\x03\x12\x38\n4METADATATYPE_DATABASE_METADATA_GET_PROCEDURE_COLUMNS\x10\x04\x12.\n*METADATATYPE_DATABASE_METADATA_GET_COLUMNS\x10\x05\x12\x38\n4METADATATYPE_DATABASE_METADATA_GET_COLUMN_PRIVILEGES\x10\x06\x12\x37\n3METADATATYPE_DATABASE_METADATA_GET_TABLE_PRIVILEGES\x10\x07\x12:\n6METADATATYPE_DATABASE_METADATA_GET_BEST_ROW_IDENTIFIER\x10\x08\x12\x36\n2METADATATYPE_DATABASE_METADATA_GET_VERSION_COLUMNS\x10\t\x12\x33\n/METADATATYPE_DATABASE_METADATA_GET_PRIMARY_KEYS\x10\n\x12\x34\n0METADATATYPE_DATABASE_METADATA_GET_IMPORTED_KEYS\x10\x0b\x12\x34\n0METADATATYPE_DATABASE_METADATA_GET_EXPORTED_KEYS\x10\x0c\x12\x36\n2METADATATYPE_DATABASE_METADATA_GET_CROSS_REFERENCE\x10\r\x12\x31\n-MET
ADATATYPE_DATABASE_METADATA_GET_INDEX_INFO\x10\x0e\x12+\n\'METADATATYPE_DATABASE_METADATA_GET_UDTS\x10\x0f\x12\x32\n.METADATATYPE_DATABASE_METADATA_GET_SUPER_TYPES\x10\x10\x12\x33\n/METADATATYPE_DATABASE_METADATA_GET_SUPER_TABLES\x10\x11\x12\x31\n-METADATATYPE_DATABASE_METADATA_GET_ATTRIBUTES\x10\x12\x12\x30\n,METADATATYPE_DATABASE_METADATA_GET_FUNCTIONS\x10\x13\x12\x37\n3METADATATYPE_DATABASE_METADATA_GET_FUNCTION_COLUMNS\x10\x14\x12\x30\n,METADATATYPE_DATABASE_METADATA_GET_TYPE_INFO\x10\x15\x12.\n*METADATATYPE_DATABASE_METADATA_GET_SCHEMAS\x10\x16\x12\x35\n1METADATATYPE_DATABASE_METADATA_GET_PSEUDO_COLUMNS\x10\x17*\xd4\x02\n\nClientType\x12\x19\n\x15\x43LIENT_TYPE_JAVA_JDBC\x10\x01\x12\x1c\n\x18\x43LIENT_TYPE_PYTHON_DBAPI\x10\x02\x12\x17\n\x13\x43LIENT_TYPE_UNKNOWN\x10\x03\x12!\n\x1d\x43LIENT_TYPE_UNKNOWN_LANGUAGE4\x10\x04\x12!\n\x1d\x43LIENT_TYPE_UNKNOWN_LANGUAGE5\x10\x05\x12!\n\x1d\x43LIENT_TYPE_UNKNOWN_LANGUAGE6\x10\x06\x12!\n\x1d\x43LIENT_TYPE_UNKNOWN_LANGUAGE7\x10\x07\x12!\n\x1d\x43LIENT_TYPE_UNKNOWN_LANGUAGE8\x10\x08\x12!\n\x1d\x43LIENT_TYPE_UNKNOWN_LANGUAGE9\x10\t\x12\"\n\x1e\x43LIENT_TYPE_UNKNOWN_LANGUAGE10\x10\nB%\n\x1b\x63om.google.protos.cloud.sql\x10\x02 \x02(\x02xd')
_TRANSACTIONISOLATIONLEVEL = descriptor.EnumDescriptor(
name='TransactionIsolationLevel',
@@ -266,6 +266,60 @@
)
+_CLIENTTYPE = descriptor.EnumDescriptor(
+ name='ClientType',
+ full_name='speckle.ClientType',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ descriptor.EnumValueDescriptor(
+ name='CLIENT_TYPE_JAVA_JDBC', index=0, number=1,
+ options=None,
+ type=None),
+ descriptor.EnumValueDescriptor(
+ name='CLIENT_TYPE_PYTHON_DBAPI', index=1, number=2,
+ options=None,
+ type=None),
+ descriptor.EnumValueDescriptor(
+ name='CLIENT_TYPE_UNKNOWN', index=2, number=3,
+ options=None,
+ type=None),
+ descriptor.EnumValueDescriptor(
+ name='CLIENT_TYPE_UNKNOWN_LANGUAGE4', index=3, number=4,
+ options=None,
+ type=None),
+ descriptor.EnumValueDescriptor(
+ name='CLIENT_TYPE_UNKNOWN_LANGUAGE5', index=4, number=5,
+ options=None,
+ type=None),
+ descriptor.EnumValueDescriptor(
+ name='CLIENT_TYPE_UNKNOWN_LANGUAGE6', index=5, number=6,
+ options=None,
+ type=None),
+ descriptor.EnumValueDescriptor(
+ name='CLIENT_TYPE_UNKNOWN_LANGUAGE7', index=6, number=7,
+ options=None,
+ type=None),
+ descriptor.EnumValueDescriptor(
+ name='CLIENT_TYPE_UNKNOWN_LANGUAGE8', index=7, number=8,
+ options=None,
+ type=None),
+ descriptor.EnumValueDescriptor(
+ name='CLIENT_TYPE_UNKNOWN_LANGUAGE9', index=8, number=9,
+ options=None,
+ type=None),
+ descriptor.EnumValueDescriptor(
+ name='CLIENT_TYPE_UNKNOWN_LANGUAGE10', index=9, number=10,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=11876,
+ serialized_end=12216,
+)
+
+
TRANSACTIONISOLATIONLEVEL_TRANSACTION_NONE = 0
TRANSACTIONISOLATIONLEVEL_TRANSACTION_READ_COMMITTED = 2
TRANSACTIONISOLATIONLEVEL_TRANSACTION_READ_UNCOMMITTED = 1
@@ -304,6 +358,16 @@
METADATATYPE_DATABASE_METADATA_GET_TYPE_INFO = 21
METADATATYPE_DATABASE_METADATA_GET_SCHEMAS = 22
METADATATYPE_DATABASE_METADATA_GET_PSEUDO_COLUMNS = 23
+CLIENT_TYPE_JAVA_JDBC = 1
+CLIENT_TYPE_PYTHON_DBAPI = 2
+CLIENT_TYPE_UNKNOWN = 3
+CLIENT_TYPE_UNKNOWN_LANGUAGE4 = 4
+CLIENT_TYPE_UNKNOWN_LANGUAGE5 = 5
+CLIENT_TYPE_UNKNOWN_LANGUAGE6 = 6
+CLIENT_TYPE_UNKNOWN_LANGUAGE7 = 7
+CLIENT_TYPE_UNKNOWN_LANGUAGE8 = 8
+CLIENT_TYPE_UNKNOWN_LANGUAGE9 = 9
+CLIENT_TYPE_UNKNOWN_LANGUAGE10 = 10
_BINDVARIABLEPROTO_DIRECTION = descriptor.EnumDescriptor(
diff --git a/google/storage/speckle/proto/sql_pb2.py b/google/storage/speckle/proto/sql_pb2.py
index ff54328..be86035 100755
--- a/google/storage/speckle/proto/sql_pb2.py
+++ b/google/storage/speckle/proto/sql_pb2.py
@@ -44,7 +44,7 @@
DESCRIPTOR = descriptor.FileDescriptor(
name='storage/speckle/proto/sql.proto',
package='speckle.sql',
- serialized_pb='\n\x1fstorage/speckle/proto/sql.proto\x12\x0bspeckle.sql\x1a\"storage/speckle/proto/client.proto\"\x8c\x03\n\x0b\x45xecRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\x04\x12\x11\n\tstatement\x18\x03 \x01(\t\x12\x31\n\rbind_variable\x18\x04 \x03(\x0b\x32\x1a.speckle.BindVariableProto\x12\x15\n\rconnection_id\x18\x05 \x02(\x0c\x12%\n\x07options\x18\x06 \x01(\x0b\x32\x14.speckle.ExecOptions\x12I\n\x0estatement_type\x18\t \x01(\x0e\x32&.speckle.sql.ExecRequest.StatementType:\tSTATEMENT\x12\"\n\x05\x62\x61tch\x18\n \x01(\x0b\x32\x13.speckle.BatchProto\x12\x12\n\nrequest_id\x18\x0b \x01(\x04\"N\n\rStatementType\x12\r\n\tSTATEMENT\x10\x01\x12\x16\n\x12PREPARED_STATEMENT\x10\x02\x12\x16\n\x12\x43\x41LLABLE_STATEMENT\x10\x03\"b\n\x0c\x45xecResponse\x12$\n\x06result\x18\x01 \x01(\x0b\x32\x14.speckle.ResultProto\x12,\n\rsql_exception\x18\x02 \x01(\x0b\x32\x15.speckle.SqlException\"j\n\rExecOpRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12\x15\n\rconnection_id\x18\x02 \x02(\x0c\x12\x1c\n\x02op\x18\x03 \x02(\x0b\x32\x10.speckle.OpProto\x12\x12\n\nrequest_id\x18\x08 \x01(\x04\"\xed\x01\n\x0e\x45xecOpResponse\x12\x12\n\nnative_sql\x18\x01 \x01(\t\x12%\n\tsavepoint\x18\x02 \x01(\x0b\x32\x12.speckle.SavePoint\x12,\n\rsql_exception\x18\x03 \x01(\x0b\x32\x15.speckle.SqlException\x12$\n\x06result\x18\x04 \x01(\x0b\x32\x14.speckle.ResultProto\x12\x30\n\x10\x63\x61\x63hed_rpc_error\x18\x05 \x01(\x0b\x32\x16.speckle.RpcErrorProto\x12\x1a\n\x0e\x63\x61\x63hed_payload\x18\x06 \x01(\x0c\x42\x02\x08\x01\"\xaa\x01\n\x0fMetadataRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12\'\n\x08metadata\x18\x03 \x02(\x0e\x32\x15.speckle.MetadataType\x12\x31\n\rbind_variable\x18\x04 \x03(\x0b\x32\x1a.speckle.BindVariableProto\x12\x15\n\rconnection_id\x18\x05 \x02(\x0c\x12\x12\n\nrequest_id\x18\x08 \x01(\x04\"\xaa\x01\n\x10MetadataResponse\x12$\n\x06result\x18\x01 \x01(\x0b\x32\x14.speckle.ResultProto\x12\x42\n\x16jdbc_database_metadata\x18\x02 
\x01(\x0b\x32\".speckle.JdbcDatabaseMetaDataProto\x12,\n\rsql_exception\x18\x03 \x01(\x0b\x32\x15.speckle.SqlException\"k\n\x15OpenConnectionRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12#\n\x08property\x18\x02 \x03(\x0b\x32\x11.speckle.Property\x12\x1b\n\x10protocol_version\x18\x05 \x01(\x04:\x01\x31\"\x86\x01\n\x16OpenConnectionResponse\x12\x15\n\rconnection_id\x18\x01 \x01(\x0c\x12,\n\rsql_exception\x18\x02 \x01(\x0b\x32\x15.speckle.SqlException\x12\'\n\x08warnings\x18\x06 \x03(\x0b\x32\x15.speckle.SqlException\"A\n\x16\x43loseConnectionRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12\x15\n\rconnection_id\x18\x02 \x02(\x0c\"G\n\x17\x43loseConnectionResponse\x12,\n\rsql_exception\x18\x01 \x01(\x0b\x32\x15.speckle.SqlException2\xa5\x03\n\nSqlService\x12?\n\x04\x45xec\x12\x18.speckle.sql.ExecRequest\x1a\x19.speckle.sql.ExecResponse\"\x02P\x01\x12\x45\n\x06\x45xecOp\x12\x1a.speckle.sql.ExecOpRequest\x1a\x1b.speckle.sql.ExecOpResponse\"\x02P\x01\x12N\n\x0bGetMetadata\x12\x1c.speckle.sql.MetadataRequest\x1a\x1d.speckle.sql.MetadataResponse\"\x02P\x01\x12]\n\x0eOpenConnection\x12\".speckle.sql.OpenConnectionRequest\x1a#.speckle.sql.OpenConnectionResponse\"\x02P\x01\x12`\n\x0f\x43loseConnection\x12#.speckle.sql.CloseConnectionRequest\x1a$.speckle.sql.CloseConnectionResponse\"\x02P\x01\x42\x30\n\x1b\x63om.google.protos.cloud.sql\x10\x02 \x02(\x02P\x01xd\x80\x01\x00\x88\x01\x00\x90\x01\x00')
+ serialized_pb='\n\x1fstorage/speckle/proto/sql.proto\x12\x0bspeckle.sql\x1a\"storage/speckle/proto/client.proto\"\x8c\x03\n\x0b\x45xecRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\x04\x12\x11\n\tstatement\x18\x03 \x01(\t\x12\x31\n\rbind_variable\x18\x04 \x03(\x0b\x32\x1a.speckle.BindVariableProto\x12\x15\n\rconnection_id\x18\x05 \x02(\x0c\x12%\n\x07options\x18\x06 \x01(\x0b\x32\x14.speckle.ExecOptions\x12I\n\x0estatement_type\x18\t \x01(\x0e\x32&.speckle.sql.ExecRequest.StatementType:\tSTATEMENT\x12\"\n\x05\x62\x61tch\x18\n \x01(\x0b\x32\x13.speckle.BatchProto\x12\x12\n\nrequest_id\x18\x0b \x01(\x04\"N\n\rStatementType\x12\r\n\tSTATEMENT\x10\x01\x12\x16\n\x12PREPARED_STATEMENT\x10\x02\x12\x16\n\x12\x43\x41LLABLE_STATEMENT\x10\x03\"b\n\x0c\x45xecResponse\x12$\n\x06result\x18\x01 \x01(\x0b\x32\x14.speckle.ResultProto\x12,\n\rsql_exception\x18\x02 \x01(\x0b\x32\x15.speckle.SqlException\"j\n\rExecOpRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12\x15\n\rconnection_id\x18\x02 \x02(\x0c\x12\x1c\n\x02op\x18\x03 \x02(\x0b\x32\x10.speckle.OpProto\x12\x12\n\nrequest_id\x18\x08 \x01(\x04\"\xed\x01\n\x0e\x45xecOpResponse\x12\x12\n\nnative_sql\x18\x01 \x01(\t\x12%\n\tsavepoint\x18\x02 \x01(\x0b\x32\x12.speckle.SavePoint\x12,\n\rsql_exception\x18\x03 \x01(\x0b\x32\x15.speckle.SqlException\x12$\n\x06result\x18\x04 \x01(\x0b\x32\x14.speckle.ResultProto\x12\x30\n\x10\x63\x61\x63hed_rpc_error\x18\x05 \x01(\x0b\x32\x16.speckle.RpcErrorProto\x12\x1a\n\x0e\x63\x61\x63hed_payload\x18\x06 \x01(\x0c\x42\x02\x08\x01\"\xaa\x01\n\x0fMetadataRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12\'\n\x08metadata\x18\x03 \x02(\x0e\x32\x15.speckle.MetadataType\x12\x31\n\rbind_variable\x18\x04 \x03(\x0b\x32\x1a.speckle.BindVariableProto\x12\x15\n\rconnection_id\x18\x05 \x02(\x0c\x12\x12\n\nrequest_id\x18\x08 \x01(\x04\"\xaa\x01\n\x10MetadataResponse\x12$\n\x06result\x18\x01 \x01(\x0b\x32\x14.speckle.ResultProto\x12\x42\n\x16jdbc_database_metadata\x18\x02 
\x01(\x0b\x32\".speckle.JdbcDatabaseMetaDataProto\x12,\n\rsql_exception\x18\x03 \x01(\x0b\x32\x15.speckle.SqlException\"\xac\x01\n\x15OpenConnectionRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12#\n\x08property\x18\x02 \x03(\x0b\x32\x11.speckle.Property\x12\x1b\n\x10protocol_version\x18\x05 \x01(\x04:\x01\x31\x12?\n\x0b\x63lient_type\x18\x06 \x01(\x0e\x32\x13.speckle.ClientType:\x15\x43LIENT_TYPE_JAVA_JDBC\"\x86\x01\n\x16OpenConnectionResponse\x12\x15\n\rconnection_id\x18\x01 \x01(\x0c\x12,\n\rsql_exception\x18\x02 \x01(\x0b\x32\x15.speckle.SqlException\x12\'\n\x08warnings\x18\x06 \x03(\x0b\x32\x15.speckle.SqlException\"A\n\x16\x43loseConnectionRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12\x15\n\rconnection_id\x18\x02 \x02(\x0c\"G\n\x17\x43loseConnectionResponse\x12,\n\rsql_exception\x18\x01 \x01(\x0b\x32\x15.speckle.SqlException2\xa5\x03\n\nSqlService\x12?\n\x04\x45xec\x12\x18.speckle.sql.ExecRequest\x1a\x19.speckle.sql.ExecResponse\"\x02P\x01\x12\x45\n\x06\x45xecOp\x12\x1a.speckle.sql.ExecOpRequest\x1a\x1b.speckle.sql.ExecOpResponse\"\x02P\x01\x12N\n\x0bGetMetadata\x12\x1c.speckle.sql.MetadataRequest\x1a\x1d.speckle.sql.MetadataResponse\"\x02P\x01\x12]\n\x0eOpenConnection\x12\".speckle.sql.OpenConnectionRequest\x1a#.speckle.sql.OpenConnectionResponse\"\x02P\x01\x12`\n\x0f\x43loseConnection\x12#.speckle.sql.CloseConnectionRequest\x1a$.speckle.sql.CloseConnectionResponse\"\x02P\x01\x42\x30\n\x1b\x63om.google.protos.cloud.sql\x10\x02 \x02(\x02P\x01xd\x80\x01\x00\x88\x01\x00\x90\x01\x00')
@@ -432,6 +432,13 @@
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
+ descriptor.FieldDescriptor(
+ name='client_type', full_name='speckle.sql.OpenConnectionRequest.client_type', index=3,
+ number=6, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
],
extensions=[
],
@@ -441,8 +448,8 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=1277,
- serialized_end=1384,
+ serialized_start=1278,
+ serialized_end=1450,
)
@@ -483,8 +490,8 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=1387,
- serialized_end=1521,
+ serialized_start=1453,
+ serialized_end=1587,
)
@@ -518,8 +525,8 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=1523,
- serialized_end=1588,
+ serialized_start=1589,
+ serialized_end=1654,
)
@@ -546,8 +553,8 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=1590,
- serialized_end=1661,
+ serialized_start=1656,
+ serialized_end=1727,
)
_EXECREQUEST.fields_by_name['bind_variable'].message_type = google.storage.speckle.proto.client_pb2._BINDVARIABLEPROTO
@@ -568,6 +575,7 @@
_METADATARESPONSE.fields_by_name['jdbc_database_metadata'].message_type = google.storage.speckle.proto.client_pb2._JDBCDATABASEMETADATAPROTO
_METADATARESPONSE.fields_by_name['sql_exception'].message_type = google.storage.speckle.proto.client_pb2._SQLEXCEPTION
_OPENCONNECTIONREQUEST.fields_by_name['property'].message_type = google.storage.speckle.proto.client_pb2._PROPERTY
+_OPENCONNECTIONREQUEST.fields_by_name['client_type'].enum_type = google.storage.speckle.proto.client_pb2._CLIENTTYPE
_OPENCONNECTIONRESPONSE.fields_by_name['sql_exception'].message_type = google.storage.speckle.proto.client_pb2._SQLEXCEPTION
_OPENCONNECTIONRESPONSE.fields_by_name['warnings'].message_type = google.storage.speckle.proto.client_pb2._SQLEXCEPTION
_CLOSECONNECTIONRESPONSE.fields_by_name['sql_exception'].message_type = google.storage.speckle.proto.client_pb2._SQLEXCEPTION
diff --git a/google/storage/speckle/python/api/converters.py b/google/storage/speckle/python/api/converters.py
index 8bae9d8..745689d 100644
--- a/google/storage/speckle/python/api/converters.py
+++ b/google/storage/speckle/python/api/converters.py
@@ -168,4 +168,21 @@
jdbc_type.VARBINARY: Blob,
jdbc_type.LONGVARBINARY: Blob,
jdbc_type.BLOB: Blob,
+ jdbc_type.CLOB: Str2Unicode,
+ jdbc_type.NCLOB: Str2Unicode,
+ jdbc_type.NCHAR: Str2Unicode,
+ jdbc_type.NVARCHAR: Str2Unicode,
+ jdbc_type.LONGNVARCHAR: Str2Unicode,
+
+ jdbc_type.ARRAY: Str2Unicode,
+ jdbc_type.NULL: Str2Unicode,
+ jdbc_type.OTHER: Str2Unicode,
+ jdbc_type.JAVA_OBJECT: Str2Unicode,
+ jdbc_type.DISTINCT: Str2Unicode,
+ jdbc_type.STRUCT: Str2Unicode,
+ jdbc_type.REF: Str2Unicode,
+ jdbc_type.DATALINK: Str2Unicode,
+ jdbc_type.BOOLEAN: Str2Unicode,
+ jdbc_type.ROWID: Str2Unicode,
+ jdbc_type.SQLXML: Str2Unicode,
}
diff --git a/google/storage/speckle/python/api/rdbms.py b/google/storage/speckle/python/api/rdbms.py
index b6cd156..c3de6e3 100755
--- a/google/storage/speckle/python/api/rdbms.py
+++ b/google/storage/speckle/python/api/rdbms.py
@@ -30,6 +30,7 @@
import time
import types
+from google.storage.speckle.proto import client_error_code_pb2
from google.storage.speckle.proto import client_pb2
from google.storage.speckle.proto import jdbc_type
from google.storage.speckle.proto import sql_pb2
@@ -286,7 +287,8 @@
response = self._conn.MakeRequest('Exec', request)
result = response.result
if result.HasField('sql_exception'):
- raise DatabaseError(result.sql_exception.message)
+ raise DatabaseError('%d: %s' % (result.sql_exception.code,
+ result.sql_exception.message))
self._rows = collections.deque()
if result.rows.columns:
@@ -416,7 +418,8 @@
class Connection(object):
def __init__(self, dsn, instance, database=None, user='root', password=None,
- deadline_seconds=30.0, conv=None):
+ deadline_seconds=30.0, conv=None,
+ query_deadline_seconds=86400.0, retry_interval_seconds=30.0):
"""Creates a new SQL Service connection.
Args:
@@ -427,7 +430,8 @@
password: A string, database password.
deadline_seconds: A float, request deadline in seconds.
conv: A dict, maps types to a conversion function. See converters.py.
-
+ query_deadline_seconds: A float, query deadline in seconds.
+ retry_interval_seconds: A float, seconds to wait between each retry.
Raises:
OperationalError: Transport failure.
DatabaseError: Error from SQL Service server.
@@ -445,6 +449,8 @@
self._idempotent_request_id = 0
if not conv:
conv = converters.conversions
+ self._query_deadline_seconds = query_deadline_seconds
+ self._retry_interval_seconds = retry_interval_seconds
self.converter = {}
self.encoders = {}
for key, value in conv.items():
@@ -458,6 +464,7 @@
def OpenConnection(self):
"""Opens a connection to SQL Service."""
request = sql_pb2.OpenConnectionRequest()
+ request.client_type = client_pb2.CLIENT_TYPE_PYTHON_DBAPI
prop = request.property.add()
prop.key = 'autoCommit'
prop.value = 'false'
@@ -569,8 +576,9 @@
if stub_method in ('Exec', 'ExecOp', 'GetMetadata'):
self._idempotent_request_id += 1
request.request_id = self._idempotent_request_id
-
- response = self.MakeRequestImpl(stub_method, request)
+ response = self._MakeRetriableRequest(stub_method, request)
+ else:
+ response = self.MakeRequestImpl(stub_method, request)
if (hasattr(response, 'sql_exception') and
response.HasField('sql_exception')):
@@ -578,6 +586,111 @@
response.sql_exception.message))
return response
+ def _MakeRetriableRequest(self, stub_method, request):
+ """Makes a retriable request.
+
+ Args:
+ stub_method: A string, the name of the method to call.
+ request: A protobuf.
+
+ Returns:
+ A protobuf.
+
+ Raises:
+ DatabaseError: Error from SQL Service server.
+ """
+ absolute_deadline_seconds = time.clock() + self._query_deadline_seconds
+ response = self.MakeRequestImpl(stub_method, request)
+ if not response.HasField('sql_exception'):
+ return response
+ sql_exception = response.sql_exception
+ if (sql_exception.application_error_code !=
+ client_error_code_pb2.SqlServiceClientError.ERROR_TIMEOUT):
+ raise DatabaseError('%d: %s' % (sql_exception.code,
+ sql_exception.message))
+ if time.clock() >= absolute_deadline_seconds:
+ raise DatabaseError('%d: %s' % (sql_exception.code,
+ sql_exception.message))
+ return self._Retry(stub_method, request.request_id,
+ absolute_deadline_seconds)
+
+ def _Retry(self, stub_method, request_id, absolute_deadline_seconds):
+ """Retries request with the given request id.
+
+ Continues to retry until either the deadline has expired or the response
+ has been received.
+
+ Args:
+ stub_method: A string, the name of the original method that triggered the
+ retry.
+ request_id: An integer, the request id used in the original request
+ absolute_deadline_seconds: An integer, absolute deadline in seconds.
+
+ Returns:
+ A protobuf.
+
+ Raises:
+ DatabaseError: If the ExecOpResponse contains a SqlException that it not
+ related to retry.
+ InternalError: If the ExceOpResponse is not valid.
+ """
+ request = sql_pb2.ExecOpRequest()
+ request.op.type = client_pb2.OpProto.RETRY
+ request.op.request_id = request_id
+ request.connection_id = self._connection_id
+ request.instance = self._instance
+ while True:
+ seconds_remaining = absolute_deadline_seconds - time.clock()
+ if seconds_remaining <= 0:
+ raise InternalError('Request [%d] timed out' % (request_id))
+ time.sleep(min(self._retry_interval_seconds, seconds_remaining))
+ self._idempotent_request_id += 1
+ request.request_id = self._idempotent_request_id
+ response = self.MakeRequestImpl('ExecOp', request)
+ if not response.HasField('sql_exception'):
+ return self._ConvertCachedResponse(stub_method, response)
+ sql_exception = response.sql_exception
+ if (sql_exception.application_error_code !=
+ client_error_code_pb2.SqlServiceClientError.ERROR_RESPONSE_PENDING):
+ raise DatabaseError('%d: %s' % (response.sql_exception.code,
+ response.sql_exception.message))
+
+ def _ConvertCachedResponse(self, stub_method, exec_op_response):
+ """Converts the cached response or RPC error.
+
+ Args:
+ stub_method: A string, the name of the original method that triggered the
+ retry.
+ exec_op_response: A protobuf, the retry response that contains either the
+ RPC error or the cached response.
+
+ Returns:
+ A protobuf, the cached response.
+
+ Raises:
+ DatabaseError: If the cached response contains SqlException.
+ InternalError: If a cached RpcErrorProto exists.
+ """
+ if exec_op_response.HasField('cached_rpc_error'):
+ raise InternalError('%d: %s' % (
+ exec_op_response.cached_rpc_error.error_code,
+ exec_op_response.cached_rpc_error.error_message))
+ if not exec_op_response.HasField('cached_payload'):
+ raise InternalError('Invalid exec op response for retry request')
+ if stub_method == 'Exec':
+ response = sql_pb2.ExecResponse()
+ elif stub_method == 'ExecOp':
+ response = sql_pb2.ExecOpResponse()
+ elif stub_method == 'GetMetadata':
+ response = sql_pb2.MetadataResponse()
+ else:
+ raise InternalError('Found unexpected stub_method: %s' % (stub_method))
+ response.ParseFromString(exec_op_response.cached_payload)
+ if response.HasField('sql_exception'):
+ raise DatabaseError('%d: %s' % (response.sql_exception.code,
+ response.sql_exception.message))
+ return response
+
def MakeRequestImpl(self, stub_method, request):
raise InternalError('No transport defined. Try using rdbms_[transport]')
diff --git a/remote_api_shell.py b/remote_api_shell.py
index 8f18e20..e673b9c 100755
--- a/remote_api_shell.py
+++ b/remote_api_shell.py
@@ -50,7 +50,7 @@
os.path.join(DIR_PATH, 'lib', 'whoosh'),
os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
os.path.join(DIR_PATH, 'lib', 'simplejson'),
- os.path.join(DIR_PATH, 'lib', 'graphy'),
+ os.path.join(DIR_PATH, 'lib', 'google.appengine._internal.graphy'),
]