App Engine Python SDK version 1.9.6

git-svn-id: http://googleappengine.googlecode.com/svn/trunk/python@442 80f5ef21-4148-0410-bacc-cfb02402ada8
diff --git a/RELEASE_NOTES b/RELEASE_NOTES
index e7ed2e1..4f03ccb 100644
--- a/RELEASE_NOTES
+++ b/RELEASE_NOTES
@@ -3,6 +3,16 @@
 
 App Engine SDK - Release Notes
 
+Version 1.9.6
+
+Python
+==============================
+- Django 1.5.4 has been upgraded to 1.5.8.
+- Django 1.4.3 has been upgraded to 1.4.13.
+- Fixed an issue with taskqueue_stub.py _Group.GetQueuesAsDict() raising
+  TypeError intermittently.
+    https://code.google.com/p/googleappengine/issues/detail?id=10131
+
 Version 1.9.5
 
 Python
diff --git a/VERSION b/VERSION
index 39e06ea..a0b5296 100644
--- a/VERSION
+++ b/VERSION
@@ -1,5 +1,5 @@
-release: "1.9.5"
-timestamp: 1398815236
+release: "1.9.6"
+timestamp: 1400627765
 api_versions: ['1']
 supported_api_versions:
   python:
diff --git a/google/appengine/api/appinfo.py b/google/appengine/api/appinfo.py
index e910721..921ac21 100644
--- a/google/appengine/api/appinfo.py
+++ b/google/appengine/api/appinfo.py
@@ -257,6 +257,17 @@
 MAXIMUM_CONCURRENT_REQUEST = 'max_concurrent_requests'
 
 
+
+
+MIN_NUM_INSTANCES = 'min_num_instances'
+MAX_NUM_INSTANCES = 'max_num_instances'
+COOL_DOWN_PERIOD_SEC = 'cool_down_period_sec'
+CPU_UTILIZATION = 'cpu_utilization'
+CPU_UTILIZATION_UTILIZATION = 'target_utilization'
+CPU_UTILIZATION_AGGREGATION_WINDOW_LENGTH_SEC = 'aggregation_window_length_sec'
+
+
+
 INSTANCES = 'instances'
 
 
@@ -1258,6 +1269,17 @@
                 '", "'.join(supported_library.non_deprecated_versions)))
 
 
+class CpuUtilization(validation.Validated):
+  """Class representing the configuration of VM CPU utilization."""
+
+  ATTRIBUTES = {
+      CPU_UTILIZATION_UTILIZATION: validation.Optional(
+          validation.Range(1e-6, 1.0, float)),
+      CPU_UTILIZATION_AGGREGATION_WINDOW_LENGTH_SEC: validation.Optional(
+          validation.Range(1, sys.maxint)),
+  }
+
+
 class AutomaticScaling(validation.Validated):
   """Class representing automatic scaling settings in the AppInfoExternal."""
   ATTRIBUTES = {
@@ -1267,6 +1289,12 @@
       MAXIMUM_PENDING_LATENCY: validation.Optional(_PENDING_LATENCY_REGEX),
       MAXIMUM_CONCURRENT_REQUEST: validation.Optional(
           _CONCURRENT_REQUESTS_REGEX),
+
+      MIN_NUM_INSTANCES: validation.Optional(validation.Range(1, sys.maxint)),
+      MAX_NUM_INSTANCES: validation.Optional(validation.Range(1, sys.maxint)),
+      COOL_DOWN_PERIOD_SEC: validation.Optional(
+          validation.Range(60, sys.maxint, int)),
+      CPU_UTILIZATION: validation.Optional(CpuUtilization),
   }
 
 
@@ -1379,7 +1407,7 @@
 
 
 class VmHealthCheck(validation.Validated):
-  """Class representing the configuration of a single library."""
+  """Class representing the configuration of VM health check."""
 
   ATTRIBUTES = {
       ENABLE_HEALTH_CHECK: validation.Optional(validation.TYPE_BOOL),
diff --git a/google/appengine/api/search/QueryLexer.py b/google/appengine/api/search/QueryLexer.py
index 9fccaa3..8e0ae81 100644
--- a/google/appengine/api/search/QueryLexer.py
+++ b/google/appengine/api/search/QueryLexer.py
@@ -26,48 +26,48 @@
 
 
 FUNCTION=7
-GEO_POINT_FN=29
-FIX=30
-ESC=34
-OCTAL_ESC=36
-FUZZY=8
-NOT=27
-DISTANCE_FN=28
-AND=25
-ESCAPED_CHAR=40
-EOF=-1
-LPAREN=23
-HAS=22
-CHAR_SEQ=37
-QUOTE=33
-RPAREN=24
-START_CHAR=41
-ARGS=4
-DIGIT=38
-EQ=21
-NE=20
 T__43=43
-LESSTHAN=17
 GE=18
+LESSTHAN=17
+FIX=30
+GEO_POINT_FN=29
 T__44=44
 T__45=45
+ESC=34
 CONJUNCTION=5
 UNICODE_ESC=35
+FUZZY=8
+OCTAL_ESC=36
 HEX_DIGIT=42
 LITERAL=10
-VALUE=14
+NOT=27
 TEXT=32
+VALUE=14
+AND=25
+DISTANCE_FN=28
+EOF=-1
+ESCAPED_CHAR=40
 REWRITE=31
-SEQUENCE=13
+LPAREN=23
 DISJUNCTION=6
+SEQUENCE=13
+HAS=22
+RPAREN=24
+QUOTE=33
+CHAR_SEQ=37
 WS=15
+START_CHAR=41
 NEGATION=11
 OR=26
+ARGS=4
 GT=19
+DIGIT=38
+EQ=21
 GLOBAL=9
 LE=16
 MID_CHAR=39
 STRING=12
+NE=20
 
 
 class QueryLexer(Lexer):
diff --git a/google/appengine/api/search/QueryParser.py b/google/appengine/api/search/QueryParser.py
index 83418a8..1e23ab9 100644
--- a/google/appengine/api/search/QueryParser.py
+++ b/google/appengine/api/search/QueryParser.py
@@ -39,8 +39,8 @@
 NOT=27
 AND=25
 DISTANCE_FN=28
-EOF=-1
 ESCAPED_CHAR=40
+EOF=-1
 LPAREN=23
 HAS=22
 RPAREN=24
diff --git a/google/appengine/api/search/search.py b/google/appengine/api/search/search.py
index d5fd302..b3f692d 100644
--- a/google/appengine/api/search/search.py
+++ b/google/appengine/api/search/search.py
@@ -2728,7 +2728,7 @@
           query=Query('subject:first good',
               options=QueryOptions(limit=20,
                   cursor=Cursor(),
-                  sortOptions=SortOptions(
+                  sort_options=SortOptions(
                       expressions=[SortExpression(expression='subject')],
                       limit=1000),
                   returned_fields=['author', 'subject', 'summary'],
diff --git a/google/appengine/api/search/search_util.py b/google/appengine/api/search/search_util.py
index 2850e79..5f188f8 100644
--- a/google/appengine/api/search/search_util.py
+++ b/google/appengine/api/search/search_util.py
@@ -19,7 +19,6 @@
 """Provides utility methods used by modules in the FTS API stub."""
 
 
-
 import datetime
 import re
 
@@ -30,6 +29,9 @@
 
 DEFAULT_MAX_SNIPPET_LENGTH = 160
 
+EXPRESSION_RETURN_TYPE_TEXT = 1
+EXPRESSION_RETURN_TYPE_NUMERIC = 2
+
 TEXT_DOCUMENT_FIELD_TYPES = [
     document_pb.FieldValue.ATOM,
     document_pb.FieldValue.TEXT,
@@ -53,11 +55,29 @@
   """Indicates attempt to perform an action unsupported on the dev server."""
 
 
-def GetFieldInDocument(document, field_name):
-  """Find and return the first field with the provided name in the document."""
-  for f in document.field_list():
-    if f.name() == field_name:
-      return f
+def GetFieldInDocument(document, field_name, return_type=None):
+  """Find and return the field with the provided name and type."""
+  if return_type is not None:
+
+    field_list = [f for f in document.field_list() if f.name() == field_name]
+    field_types_dict = dict((f.value().type(), f) for f in field_list)
+    if return_type == EXPRESSION_RETURN_TYPE_TEXT:
+      if document_pb.FieldValue.HTML in field_types_dict:
+        return field_types_dict[document_pb.FieldValue.HTML]
+      if document_pb.FieldValue.ATOM in field_types_dict:
+        return field_types_dict[document_pb.FieldValue.ATOM]
+      return field_types_dict.get(document_pb.FieldValue.TEXT)
+    elif return_type == EXPRESSION_RETURN_TYPE_NUMERIC:
+      if document_pb.FieldValue.NUMBER in field_types_dict:
+        return field_types_dict[document_pb.FieldValue.NUMBER]
+      return field_types_dict.get(document_pb.FieldValue.DATE)
+    else:
+      return field_types_dict.get(return_type)
+  else:
+
+    for f in document.field_list():
+      if f.name() == field_name:
+        return f
   return None
 
 
diff --git a/google/appengine/api/search/simple_search_stub.py b/google/appengine/api/search/simple_search_stub.py
index 2085581..c680a52 100644
--- a/google/appengine/api/search/simple_search_stub.py
+++ b/google/appengine/api/search/simple_search_stub.py
@@ -507,10 +507,15 @@
 
   def _Sort(self, docs, search_params, query, score):
     """Return sorted docs with score or evaluated search_params as sort key."""
+
+
+
+    docs = sorted(docs, key=lambda doc: doc.document.order_id(), reverse=True)
+
     if not search_params.sort_spec_size():
       if score:
         return sorted(docs, key=lambda doc: doc.score, reverse=True)
-      return sorted(docs, key=lambda doc: doc.document.order_id(), reverse=True)
+      return docs
 
     def SortKey(scored_doc):
       """Return the sort key for a document based on the request parameters.
@@ -528,23 +533,36 @@
       """
       expr_vals = []
       for sort_spec in search_params.sort_spec_list():
-        if not (sort_spec.has_default_value_text() or
-                sort_spec.has_default_value_numeric()):
-          raise Exception('A default value must be specified for sorting.')
-        elif sort_spec.has_default_value_text():
-          default_value = sort_spec.default_value_text()
-        else:
-          default_value = sort_spec.default_value_numeric()
+        default_text = None
+        default_numeric = None
+        if sort_spec.has_default_value_text():
+          default_text = sort_spec.default_value_text()
+        if sort_spec.has_default_value_numeric():
+          default_numeric = sort_spec.default_value_numeric()
         try:
-          val = expression_evaluator.ExpressionEvaluator(
+          text_val = expression_evaluator.ExpressionEvaluator(
               scored_doc, self._inverted_index, True).ValueOf(
-                  sort_spec.sort_expression(), default_value=default_value)
+                  sort_spec.sort_expression(), default_value=default_text,
+                  return_type=search_util.EXPRESSION_RETURN_TYPE_TEXT)
+          num_val = expression_evaluator.ExpressionEvaluator(
+              scored_doc, self._inverted_index, True).ValueOf(
+                  sort_spec.sort_expression(), default_value=default_numeric,
+                  return_type=search_util.EXPRESSION_RETURN_TYPE_NUMERIC)
         except expression_evaluator.QueryExpressionEvaluationError, e:
           raise expression_evaluator.ExpressionEvaluationError(
               _FAILED_TO_PARSE_SEARCH_REQUEST % (query, e))
-        if isinstance(val, datetime.datetime):
-          val = search_util.EpochTime(val)
-        expr_vals.append(val)
+        if isinstance(num_val, datetime.datetime):
+          num_val = search_util.EpochTime(num_val)
+
+
+        elif isinstance(text_val, datetime.datetime):
+          num_val = search_util.EpochTime(text_val)
+
+        if text_val is None:
+          text_val = ''
+        if num_val is None:
+          num_val = 0
+        expr_vals.append([text_val, num_val])
       return tuple(expr_vals)
 
     def SortCmp(x, y):
diff --git a/google/appengine/api/search/stub/expression_evaluator.py b/google/appengine/api/search/stub/expression_evaluator.py
index 7d8127f..fde4ee8 100644
--- a/google/appengine/api/search/stub/expression_evaluator.py
+++ b/google/appengine/api/search/stub/expression_evaluator.py
@@ -140,21 +140,33 @@
       return geo_util.LatLng(value.lat(), value.lng())
     raise TypeError('No conversion defined for type %s' % value_type)
 
-  def _Min(self, *nodes):
-    return min(self._Eval(node) for node in nodes)
+  def _Min(self, return_type, *nodes):
+    if return_type == search_util.EXPRESSION_RETURN_TYPE_TEXT:
+      raise _ExpressionError('Min cannot be converted to a text type')
+    return min(self._Eval(
+        node, document_pb.FieldValue.NUMBER) for node in nodes)
 
-  def _Max(self, *nodes):
-    return max(self._Eval(node) for node in nodes)
+  def _Max(self, return_type, *nodes):
+    if return_type == search_util.EXPRESSION_RETURN_TYPE_TEXT:
+      raise _ExpressionError('Max cannot be converted to a text type')
+    return max(self._Eval(
+        node, document_pb.FieldValue.NUMBER) for node in nodes)
 
-  def _Distance(self, *nodes):
+  def _Distance(self, return_type, *nodes):
+    if return_type == search_util.EXPRESSION_RETURN_TYPE_TEXT:
+      raise _ExpressionError('Distance cannot be converted to a text type')
     lhs, rhs = nodes
-    return self._Eval(lhs) - self._Eval(rhs)
+    return (self._Eval(lhs, document_pb.FieldValue.GEO) -
+            self._Eval(rhs, document_pb.FieldValue.GEO))
 
-  def _Geopoint(self, *nodes):
-    latitude, longitude = (self._Eval(node) for node in nodes)
+  def _Geopoint(self, return_type, *nodes):
+    if return_type == search_util.EXPRESSION_RETURN_TYPE_TEXT:
+      raise _ExpressionError('Geopoint cannot be converted to a text type')
+    latitude, longitude = (self._Eval(
+        node, document_pb.FieldValue.NUMBER) for node in nodes)
     return geo_util.LatLng(latitude, longitude)
 
-  def _Count(self, node):
+  def _Count(self, return_type, node):
 
 
 
@@ -218,7 +230,7 @@
 
 
 
-  def _Snippet(self, query, field, *args):
+  def _Snippet(self, return_type, query, field, *args):
     """Create a snippet given a query and the field to query on.
 
     Args:
@@ -256,7 +268,6 @@
       for posting in postings:
         if posting.doc_id != self._doc_pb.id() or not posting.positions:
           continue
-
         field_val = self._GetFieldValue(
             search_util.GetFieldInDocument(self._doc_pb, field))
         if not field_val:
@@ -296,48 +307,67 @@
           '%s is currently unsupported on dev_appserver.' % method)
     return RaiseUnsupported
 
-  def _EvalBinaryOp(self, op, op_name, node):
-    """Evaluate a binary operator on the document.
+  def _EvalNumericBinaryOp(self, op, op_name, node, return_type):
+    """Evaluate a Numeric Binary operator on the document.
 
     Args:
       op: The operator function. Must take exactly two arguments.
       op_name: The name of the operator. Used in error messages.
       node: The expression AST node representing the operator application.
+      return_type: The type to retrieve for fields with multiple types
+        in the expression. Used when the field type is ambiguous and cannot be
+        inferred from the context. If None, we retrieve the first field type
+        found in doc list.
 
     Returns:
       The result of applying op to node's two children.
 
     Raises:
       ValueError: The node does not have exactly two children.
+      _ExpressionError: The return type is Text.
     """
+    if return_type == search_util.EXPRESSION_RETURN_TYPE_TEXT:
+      raise _ExpressionError('Expression cannot be converted to a text type')
     if len(node.children) != 2:
       raise ValueError('%s operator must always have two arguments' % op_name)
     n1, n2 = node.children
-    return op(self._Eval(n1), self._Eval(n2))
+    return op(self._Eval(n1, document_pb.FieldValue.NUMBER),
+              self._Eval(n2, document_pb.FieldValue.NUMBER))
 
-  def _EvalUnaryOp(self, op, op_name, node):
+  def _EvalNumericUnaryOp(self, op, op_name, node, return_type):
     """Evaluate a unary operator on the document.
 
     Args:
       op: The operator function. Must take exactly one argument.
       op_name: The name of the operator. Used in error messages.
       node: The expression AST node representing the operator application.
+      return_type: The type to retrieve for fields with multiple types
+        in the expression. Used when the field type is ambiguous and cannot be
+        inferred from the context. If None, we retrieve the first field type
+        found in doc list.
 
     Returns:
       The result of applying op to node's child.
 
     Raises:
       ValueError: The node does not have exactly one child.
+      _ExpressionError: The return type is Text.
     """
+    if return_type == search_util.EXPRESSION_RETURN_TYPE_TEXT:
+      raise _ExpressionError('Expression cannot be converted to a text type')
     if len(node.children) != 1:
       raise ValueError('%s operator must always have one arguments' % op_name)
-    return op(self._Eval(node.children[0]))
+    return op(self._Eval(node.children[0], document_pb.FieldValue.NUMBER))
 
-  def _Eval(self, node):
+  def _Eval(self, node, return_type=None):
     """Evaluate an expression node on the document.
 
     Args:
       node: The expression AST node representing an expression subtree.
+      return_type: The type to retrieve for fields with multiple types
+        in the expression. Used when the field type is ambiguous and cannot be
+        inferred from the context. If None, we retrieve the first field type
+        found in doc list.
 
     Returns:
       The Python value that maps to the value of node. Types are inferred from
@@ -357,19 +387,23 @@
       func = self._function_table[node.getType()]
 
 
-      return func(*node.children)
+      return func(return_type, *node.children)
 
     if node.getType() == ExpressionParser.PLUS:
-      return self._EvalBinaryOp(lambda a, b: a + b, 'addition', node)
+      return self._EvalNumericBinaryOp(lambda a, b: a + b, 'addition', node,
+                                       return_type)
     if node.getType() == ExpressionParser.MINUS:
-      return self._EvalBinaryOp(lambda a, b: a - b, 'subtraction', node)
+      return self._EvalNumericBinaryOp(lambda a, b: a - b, 'subtraction', node,
+                                       return_type)
     if node.getType() == ExpressionParser.DIV:
-      return self._EvalBinaryOp(lambda a, b: a / b, 'division', node)
+      return self._EvalNumericBinaryOp(lambda a, b: a / b, 'division', node,
+                                       return_type)
     if node.getType() == ExpressionParser.TIMES:
-      return self._EvalBinaryOp(lambda a, b: a * b, 'multiplication', node)
+      return self._EvalNumericBinaryOp(lambda a, b: a * b,
+                                       'multiplication', node, return_type)
     if node.getType() == ExpressionParser.NEG:
-      return self._EvalUnaryOp(lambda a: -a, 'negation', node)
-
+      return self._EvalNumericUnaryOp(lambda a: -a, 'negation', node,
+                                      return_type)
     if node.getType() in (ExpressionParser.INT, ExpressionParser.FLOAT):
       return float(query_parser.GetQueryNodeText(node))
     if node.getType() == ExpressionParser.PHRASE:
@@ -379,19 +413,24 @@
       name = query_parser.GetQueryNodeText(node)
       if name == '_score':
         return self._doc.score
-      field = search_util.GetFieldInDocument(self._doc_pb, name)
+      field = search_util.GetFieldInDocument(self._doc_pb, name,
+                                             return_type)
       if field:
         return self._GetFieldValue(field)
       raise _ExpressionError('No field %s in document' % name)
 
     raise _ExpressionError('Unable to handle node %s' % node)
 
-  def ValueOf(self, expression, default_value=None):
+  def ValueOf(self, expression, default_value=None, return_type=None):
     """Returns the value of an expression on a document.
 
     Args:
       expression: The expression string.
       default_value: The value to return if the expression cannot be evaluated.
+      return_type: The type the expression should evaluate to. Used to create
+        multiple sorts for ambiguous expressions. If None, the expression
+        evaluates to the inferred type or first type of a field it encounters in
+        a document.
 
     Returns:
       The value of the expression on the evaluator's document, or default_value
@@ -415,17 +454,25 @@
     name = query_parser.GetQueryNodeText(expression_tree)
     schema = self._inverted_index.GetSchema()
     if (expression_tree.getType() == ExpressionParser.NAME and
-        schema.IsType(name, document_pb.FieldValue.DATE)):
-      if isinstance(default_value, basestring):
-        try:
-          default_value = search_util.DeserializeDate(default_value)
-        except ValueError:
-          raise QueryExpressionEvaluationError(
-              'Default text value is not appropriate for sort expression \'' +
-              name + '\': failed to parse date \"' + default_value + '\"')
+        name in schema):
+      contains_text_result = False
+      for field_type in schema[name].type_list():
+        if field_type in search_util.TEXT_DOCUMENT_FIELD_TYPES:
+          contains_text_result = True
+
+
+      if (schema.IsType(name, document_pb.FieldValue.DATE) and
+          not contains_text_result):
+        if isinstance(default_value, basestring):
+          try:
+            default_value = search_util.DeserializeDate(default_value)
+          except ValueError:
+            raise QueryExpressionEvaluationError(
+                'Default text value is not appropriate for sort expression \'' +
+                name + '\': failed to parse date \"' + default_value + '\"')
     result = default_value
     try:
-      result = self._Eval(expression_tree)
+      result = self._Eval(expression_tree, return_type=return_type)
     except _ExpressionError, e:
 
 
diff --git a/google/appengine/api/taskqueue/taskqueue_stub.py b/google/appengine/api/taskqueue/taskqueue_stub.py
index d3b2500..9dddd30 100644
--- a/google/appengine/api/taskqueue/taskqueue_stub.py
+++ b/google/appengine/api/taskqueue/taskqueue_stub.py
@@ -329,9 +329,11 @@
         queue_dict['mode'] = 'push'
       queue_dict['acl'] = queue.acl
 
-      if queue.Oldest():
-        queue_dict['oldest_task'] = _FormatEta(queue.Oldest())
-        queue_dict['eta_delta'] = _EtaDelta(queue.Oldest(), now)
+
+      oldest_eta = queue.Oldest()
+      if oldest_eta:
+        queue_dict['oldest_task'] = _FormatEta(oldest_eta)
+        queue_dict['eta_delta'] = _EtaDelta(oldest_eta, now)
       else:
         queue_dict['oldest_task'] = ''
         queue_dict['eta_delta'] = ''
diff --git a/google/appengine/api/urlfetch.py b/google/appengine/api/urlfetch.py
index df485fa..50b3aac 100644
--- a/google/appengine/api/urlfetch.py
+++ b/google/appengine/api/urlfetch.py
@@ -194,7 +194,7 @@
       "PATH_INFO" not in os.environ):
     return False
 
-  scheme, host_port, path, query, fragment = urlparse.urlsplit(url)
+  _, host_port, path, _, _ = urlparse.urlsplit(url)
 
   if host_port == os.environ['HTTP_HOST']:
     current_path = urllib2.unquote(os.environ['PATH_INFO'])
@@ -374,11 +374,8 @@
   try:
     rpc.check_success()
   except apiproxy_errors.RequestTooLargeError, err:
-    error_detail = ''
-    if err.error_detail:
-      error_detail = ' Error: ' + err.error_detail
     raise InvalidURLError(
-        'Invalid request URL: ' + url + error_detail)
+        'Request body too large fetching URL: ' + url)
   except apiproxy_errors.ApplicationError, err:
     error_detail = ''
     if err.error_detail:
diff --git a/google/appengine/api/user_service_stub.py b/google/appengine/api/user_service_stub.py
index ae4cfb9..177896a 100644
--- a/google/appengine/api/user_service_stub.py
+++ b/google/appengine/api/user_service_stub.py
@@ -199,8 +199,14 @@
     if host and protocol:
       return continue_url
 
-    protocol, host, _, _, _, _ = urlparse.urlparse(
-        self.request_data.get_request_url(request_id))
+    try:
+      protocol, host, _, _, _, _ = urlparse.urlparse(
+          self.request_data.get_request_url(request_id))
+    except KeyError:
+
+
+
+      pass
 
 
     if path == '':
diff --git a/google/appengine/cron/GrocLexer.py b/google/appengine/cron/GrocLexer.py
index d8338bb..af2460a 100644
--- a/google/appengine/cron/GrocLexer.py
+++ b/google/appengine/cron/GrocLexer.py
@@ -53,8 +53,8 @@
 FEBRUARY=29
 MONDAY=20
 SUNDAY=26
-JUNE=33
 TWO_DIGIT_HOUR_TIME=43
+JUNE=33
 OF=4
 JANUARY=28
 MINUTES=18
diff --git a/google/appengine/cron/GrocParser.py b/google/appengine/cron/GrocParser.py
index 35db8b8..3f5c2c3 100644
--- a/google/appengine/cron/GrocParser.py
+++ b/google/appengine/cron/GrocParser.py
@@ -89,8 +89,8 @@
 FEBRUARY=29
 MONDAY=20
 SUNDAY=26
-JUNE=33
 TWO_DIGIT_HOUR_TIME=43
+JUNE=33
 OF=4
 JANUARY=28
 MINUTES=18
diff --git a/google/appengine/datastore/datastore_v4_pb.py b/google/appengine/datastore/datastore_v4_pb.py
index 63cd322..424303d 100644
--- a/google/appengine/datastore/datastore_v4_pb.py
+++ b/google/appengine/datastore/datastore_v4_pb.py
@@ -177,7 +177,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Error'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9yc3oJRXJyb3JDb2RliwGSAQtCQURfUkVRVUVTVJgBAYwBiwGSARZDT05DVVJSRU5UX1RSQU5TQUNUSU9OmAECjAGLAZIBDklOVEVSTkFMX0VSUk9SmAEDjAGLAZIBCk5FRURfSU5ERViYAQSMAYsBkgEHVElNRU9VVJgBBYwBiwGSARFQRVJNSVNTSU9OX0RFTklFRJgBBowBiwGSAQ5CSUdUQUJMRV9FUlJPUpgBB4wBiwGSARxDT01NSVRURURfQlVUX1NUSUxMX0FQUExZSU5HmAEIjAGLAZIBE0NBUEFCSUxJVFlfRElTQUJMRUSYAQmMAYsBkgEVVFJZX0FMVEVSTkFURV9CQUNLRU5EmAEKjAGLAZIBEVNBRkVfVElNRV9UT09fT0xEmAELjAF0ugG4MgonYXBwaG9zdGluZy9kYXRhc3RvcmUvZGF0YXN0b3JlX3Y0LnByb3RvEhdhcHBob3N0aW5nLmRhdGFzdG9yZS52NBokYXBwaG9zdGluZy9kYXRhc3RvcmUvZW50aXR5X3Y0LnByb3RvIosCCgVFcnJvciKBAgoJRXJyb3JDb2RlEg8KC0JBRF9SRVFVRVNUEAESGgoWQ09OQ1VSUkVOVF9UUkFOU0FDVElPThACEhIKDklOVEVSTkFMX0VSUk9SEAMSDgoKTkVFRF9JTkRFWBAEEgsKB1RJTUVPVVQQBRIVChFQRVJNSVNTSU9OX0RFTklFRBAGEhIKDkJJR1RBQkxFX0VSUk9SEAcSIAocQ09NTUlUVEVEX0JVVF9TVElMTF9BUFBMWUlORxAIEhcKE0NBUEFCSUxJVFlfRElTQUJMRUQQCRIZChVUUllfQUxURVJOQVRFX0JBQ0tFTkQQChIVChFTQUZFX1RJTUVfVE9PX09MRBALIoYBCgxFbnRpdHlSZXN1bHQSLwoGZW50aXR5GAEgAigLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Eg8KB3ZlcnNpb24YAiABKAMiNAoKUmVzdWx0VHlwZRIICgRGVUxMEAESDgoKUFJPSkVDVElPThACEgwKCEtFWV9PTkxZEAMi8QIKBVF1ZXJ5Ej8KCnByb2plY3Rpb24YAiADKAsyKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUV4cHJlc3Npb24SNQoEa2luZBgDIAMoCzInLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktpbmRFeHByZXNzaW9uEi8KBmZpbHRlchgEIAEoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlchI1CgVvcmRlchgFIAMoCzImLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXISPAoIZ3JvdXBfYnkYBiADKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRIUCgxzdGFydF9jdXJzb3IYByABKAwSEgoKZW5kX2N1cnNvchgIIAEoDBIRCgZvZmZzZXQYCiABKAU6ATASDQoFbGltaXQYCyABKAUiHgoOS2luZEV4cHJlc3Npb24SDAoEbmFtZRgBIAIoCSIhChFQcm9wZXJ0eVJlZmVyZW5jZRIMCgRuYW1lGAIgAigJItMBChJQcm9wZXJ0eUV4cHJlc3Npb24SPAoIcHJvcGVydHkYASACKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRJdChRhZ2dyZWdhdGlvbl9mdW5jdGlvbhgCIAEoDjI/LmFwcGhvc3Rpbm
cuZGF0YXN0b3JlLnY0LlByb3BlcnR5RXhwcmVzc2lvbi5BZ2dyZWdhdGlvbkZ1bmN0aW9uIiAKE0FnZ3JlZ2F0aW9uRnVuY3Rpb24SCQoFRklSU1QQASLJAQoNUHJvcGVydHlPcmRlchI8Cghwcm9wZXJ0eRgBIAIoCzIqLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlEk4KCWRpcmVjdGlvbhgCIAEoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXIuRGlyZWN0aW9uOglBU0NFTkRJTkciKgoJRGlyZWN0aW9uEg0KCUFTQ0VORElORxABEg4KCkRFU0NFTkRJTkcQAiKOAQoGRmlsdGVyEkIKEGNvbXBvc2l0ZV9maWx0ZXIYASABKAsyKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21wb3NpdGVGaWx0ZXISQAoPcHJvcGVydHlfZmlsdGVyGAIgASgLMicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlGaWx0ZXIinAEKD0NvbXBvc2l0ZUZpbHRlchJDCghvcGVyYXRvchgBIAIoDjIxLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbXBvc2l0ZUZpbHRlci5PcGVyYXRvchIvCgZmaWx0ZXIYAiADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5GaWx0ZXIiEwoIT3BlcmF0b3ISBwoDQU5EEAEivgIKDlByb3BlcnR5RmlsdGVyEjwKCHByb3BlcnR5GAEgAigLMiouYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2USQgoIb3BlcmF0b3IYAiACKA4yMC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUZpbHRlci5PcGVyYXRvchItCgV2YWx1ZRgDIAIoCzIeLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlZhbHVlInsKCE9wZXJhdG9yEg0KCUxFU1NfVEhBThABEhYKEkxFU1NfVEhBTl9PUl9FUVVBTBACEhAKDEdSRUFURVJfVEhBThADEhkKFUdSRUFURVJfVEhBTl9PUl9FUVVBTBAEEgkKBUVRVUFMEAUSEAoMSEFTX0FOQ0VTVE9SEAsisAEKCEdxbFF1ZXJ5EhQKDHF1ZXJ5X3N0cmluZxgBIAIoCRIcCg1hbGxvd19saXRlcmFsGAIgASgIOgVmYWxzZRI2CghuYW1lX2FyZxgDIAMoCzIkLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5QXJnEjgKCm51bWJlcl9hcmcYBCADKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeUFyZyJaCgtHcWxRdWVyeUFyZxIMCgRuYW1lGAEgASgJEi0KBXZhbHVlGAIgASgLMh4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWUSDgoGY3Vyc29yGAMgASgMIvcCChBRdWVyeVJlc3VsdEJhdGNoEkwKEmVudGl0eV9yZXN1bHRfdHlwZRgBIAIoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdC5SZXN1bHRUeXBlEjwKDWVudGl0eV9yZXN1bHQYAiADKAsyJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQSEgoKZW5kX2N1cnNvchgEIAEoDBJPCgxtb3JlX3Jlc3VsdHMYBSACKA4yOS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoLk1vcmVSZXN1bHRzVHlwZRIaCg9za2lwcGVkX3Jlc3VsdHMYBiABKAU6ATAiVgoPTW9yZVJlc3VsdHNUeXBlEhAKDE
5PVF9GSU5JU0hFRBABEhwKGE1PUkVfUkVTVUxUU19BRlRFUl9MSU1JVBACEhMKD05PX01PUkVfUkVTVUxUUxADItwBCghNdXRhdGlvbhI3CgJvcBgBIAIoDjIrLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uLk9wZXJhdGlvbhIpCgNrZXkYAiABKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSLwoGZW50aXR5GAMgASgLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5IjsKCU9wZXJhdGlvbhIKCgZJTlNFUlQQARIKCgZVUERBVEUQAhIKCgZVUFNFUlQQAxIKCgZERUxFVEUQBCJTCg5NdXRhdGlvblJlc3VsdBIpCgNrZXkYAyABKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSFgoLbmV3X3ZlcnNpb24YBCABKAM6ATAipAIKEkRlcHJlY2F0ZWRNdXRhdGlvbhIvCgZ1cHNlcnQYASADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSLwoGdXBkYXRlGAIgAygLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Ei8KBmluc2VydBgDIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRI3Cg5pbnNlcnRfYXV0b19pZBgEIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRIsCgZkZWxldGUYBSADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSFAoFZm9yY2UYBiABKAg6BWZhbHNlIusBChhEZXByZWNhdGVkTXV0YXRpb25SZXN1bHQSFQoNaW5kZXhfdXBkYXRlcxgBIAIoBRI4ChJpbnNlcnRfYXV0b19pZF9rZXkYAiADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSFgoOdXBzZXJ0X3ZlcnNpb24YAyADKAMSFgoOdXBkYXRlX3ZlcnNpb24YBCADKAMSFgoOaW5zZXJ0X3ZlcnNpb24YBSADKAMSHgoWaW5zZXJ0X2F1dG9faWRfdmVyc2lvbhgGIAMoAxIWCg5kZWxldGVfdmVyc2lvbhgHIAMoAyK1AQoLUmVhZE9wdGlvbnMSVwoQcmVhZF9jb25zaXN0ZW5jeRgBIAEoDjI0LmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJlYWRPcHRpb25zLlJlYWRDb25zaXN0ZW5jeToHREVGQVVMVBITCgt0cmFuc2FjdGlvbhgCIAEoDCI4Cg9SZWFkQ29uc2lzdGVuY3kSCwoHREVGQVVMVBAAEgoKBlNUUk9ORxABEgwKCEVWRU5UVUFMEAIidgoNTG9va3VwUmVxdWVzdBI6CgxyZWFkX29wdGlvbnMYASABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucxIpCgNrZXkYAyADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkirgEKDkxvb2t1cFJlc3BvbnNlEjQKBWZvdW5kGAEgAygLMiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0EjYKB21pc3NpbmcYAiADKAsyJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQSLgoIZGVmZXJyZWQYAyADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkiqwIKD1J1blF1ZXJ5UmVxdWVzdBI6CgxyZWFkX29wdGlvbnMYASABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucxI6CgxwYXJ0aXRpb25faWQYAiABKAsyJC5hcHBob3N0aW
5nLmRhdGFzdG9yZS52NC5QYXJ0aXRpb25JZBItCgVxdWVyeRgDIAEoCzIeLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5EjQKCWdxbF9xdWVyeRgHIAEoCzIhLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5Eh0KFW1pbl9zYWZlX3RpbWVfc2Vjb25kcxgEIAEoAxIcChRzdWdnZXN0ZWRfYmF0Y2hfc2l6ZRgFIAEoBSJiChBSdW5RdWVyeVJlc3BvbnNlEjgKBWJhdGNoGAEgAigLMikuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUXVlcnlSZXN1bHRCYXRjaBIUCgxxdWVyeV9oYW5kbGUYAiABKAwiLAoUQ29udGludWVRdWVyeVJlcXVlc3QSFAoMcXVlcnlfaGFuZGxlGAEgAigMIlEKFUNvbnRpbnVlUXVlcnlSZXNwb25zZRI4CgViYXRjaBgBIAIoCzIpLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2giUwoXQmVnaW5UcmFuc2FjdGlvblJlcXVlc3QSGgoLY3Jvc3NfZ3JvdXAYASABKAg6BWZhbHNlEhwKDWNyb3NzX3JlcXVlc3QYAiABKAg6BWZhbHNlIi8KGEJlZ2luVHJhbnNhY3Rpb25SZXNwb25zZRITCgt0cmFuc2FjdGlvbhgBIAIoDCImCg9Sb2xsYmFja1JlcXVlc3QSEwoLdHJhbnNhY3Rpb24YASACKAwiEgoQUm9sbGJhY2tSZXNwb25zZSLAAgoNQ29tbWl0UmVxdWVzdBITCgt0cmFuc2FjdGlvbhgBIAEoDBIzCghtdXRhdGlvbhgFIAMoCzIhLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uEkgKE2RlcHJlY2F0ZWRfbXV0YXRpb24YAiABKAsyKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5EZXByZWNhdGVkTXV0YXRpb24SSAoEbW9kZRgEIAEoDjIrLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlcXVlc3QuTW9kZToNVFJBTlNBQ1RJT05BTBIfChBpZ25vcmVfcmVhZF9vbmx5GAYgASgIOgVmYWxzZSIwCgRNb2RlEhEKDVRSQU5TQUNUSU9OQUwQARIVChFOT05fVFJBTlNBQ1RJT05BTBACIsABCg5Db21taXRSZXNwb25zZRJACg9tdXRhdGlvbl9yZXN1bHQYAyADKAsyJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvblJlc3VsdBJVChpkZXByZWNhdGVkX211dGF0aW9uX3Jlc3VsdBgBIAEoCzIxLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkRlcHJlY2F0ZWRNdXRhdGlvblJlc3VsdBIVCg1pbmRleF91cGRhdGVzGAQgASgFInMKEkFsbG9jYXRlSWRzUmVxdWVzdBIuCghhbGxvY2F0ZRgBIAMoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleRItCgdyZXNlcnZlGAIgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5IkYKE0FsbG9jYXRlSWRzUmVzcG9uc2USLwoJYWxsb2NhdGVkGAEgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5IlgKDFdyaXRlUmVxdWVzdBJIChNkZXByZWNhdGVkX211dGF0aW9uGAEgAigLMisuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRGVwcmVjYXRlZE11dGF0aW9uMqIHChJEYXRhc3RvcmVWNFNlcnZpY2USeQoQQmVnaW5UcmFuc2FjdGlvbhIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkJlZ2luVHJhbnNhY3Rpb25SZXF1ZX
N0GjEuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQmVnaW5UcmFuc2FjdGlvblJlc3BvbnNlIgASYQoIUm9sbGJhY2sSKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1JlcXVlc3QaKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1Jlc3BvbnNlIgASWwoGQ29tbWl0EiYuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tbWl0UmVxdWVzdBonLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlc3BvbnNlIgASYQoIUnVuUXVlcnkSKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlcXVlc3QaKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlc3BvbnNlIgAScAoNQ29udGludWVRdWVyeRItLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbnRpbnVlUXVlcnlSZXF1ZXN0Gi4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29udGludWVRdWVyeVJlc3BvbnNlIgASWwoGTG9va3VwEiYuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTG9va3VwUmVxdWVzdBonLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkxvb2t1cFJlc3BvbnNlIgASagoLQWxsb2NhdGVJZHMSKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1JlcXVlc3QaLC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1Jlc3BvbnNlIgASWAoDR2V0EiYuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTG9va3VwUmVxdWVzdBonLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkxvb2t1cFJlc3BvbnNlIgASWQoFV3JpdGUSJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Xcml0ZVJlcXVlc3QaJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21taXRSZXNwb25zZSIAQiMKH2NvbS5nb29nbGUuYXBwaG9zdGluZy5kYXRhc3RvcmUgAQ=="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9yc3oJRXJyb3JDb2RliwGSAQtCQURfUkVRVUVTVJgBAYwBiwGSARZDT05DVVJSRU5UX1RSQU5TQUNUSU9OmAECjAGLAZIBDklOVEVSTkFMX0VSUk9SmAEDjAGLAZIBCk5FRURfSU5ERViYAQSMAYsBkgEHVElNRU9VVJgBBYwBiwGSARFQRVJNSVNTSU9OX0RFTklFRJgBBowBiwGSAQ5CSUdUQUJMRV9FUlJPUpgBB4wBiwGSARxDT01NSVRURURfQlVUX1NUSUxMX0FQUExZSU5HmAEIjAGLAZIBE0NBUEFCSUxJVFlfRElTQUJMRUSYAQmMAYsBkgEVVFJZX0FMVEVSTkFURV9CQUNLRU5EmAEKjAGLAZIBEVNBRkVfVElNRV9UT09fT0xEmAELjAF0ugHoMgonYXBwaG9zdGluZy9kYXRhc3RvcmUvZGF0YXN0b3JlX3Y0LnByb3RvEhdhcHBob3N0aW5nLmRhdGFzdG9yZS52NBokYXBwaG9zdGluZy9kYXRhc3RvcmUvZW50aXR5X3Y0LnByb3RvIosCCgVFcnJvciKBAgoJRXJyb3JDb2RlEg8KC0JBRF9SRVFVRVNUEAESGgoWQ09OQ1VSUkVOVF9UUkFOU0FDVElPThACEhIKDklOVEVSTkFMX0VSUk9SEAMSDgoKTkVFRF9JTkRFWBAEEgsKB1RJTUVPVVQQBRIVChFQRVJNSVNTSU9OX0RFTklFRBAGEhIKDkJJR1RBQkxFX0VSUk9SEAcSIAocQ09NTUlUVEVEX0JVVF9TVElMTF9BUFBMWUlORxAIEhcKE0NBUEFCSUxJVFlfRElTQUJMRUQQCRIZChVUUllfQUxURVJOQVRFX0JBQ0tFTkQQChIVChFTQUZFX1RJTUVfVE9PX09MRBALIoYBCgxFbnRpdHlSZXN1bHQSLwoGZW50aXR5GAEgAigLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Eg8KB3ZlcnNpb24YAiABKAMiNAoKUmVzdWx0VHlwZRIICgRGVUxMEAESDgoKUFJPSkVDVElPThACEgwKCEtFWV9PTkxZEAMi8QIKBVF1ZXJ5Ej8KCnByb2plY3Rpb24YAiADKAsyKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUV4cHJlc3Npb24SNQoEa2luZBgDIAMoCzInLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktpbmRFeHByZXNzaW9uEi8KBmZpbHRlchgEIAEoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkZpbHRlchI1CgVvcmRlchgFIAMoCzImLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXISPAoIZ3JvdXBfYnkYBiADKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRIUCgxzdGFydF9jdXJzb3IYByABKAwSEgoKZW5kX2N1cnNvchgIIAEoDBIRCgZvZmZzZXQYCiABKAU6ATASDQoFbGltaXQYCyABKAUiHgoOS2luZEV4cHJlc3Npb24SDAoEbmFtZRgBIAIoCSIhChFQcm9wZXJ0eVJlZmVyZW5jZRIMCgRuYW1lGAIgAigJItMBChJQcm9wZXJ0eUV4cHJlc3Npb24SPAoIcHJvcGVydHkYASACKAsyKi5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eVJlZmVyZW5jZRJdChRhZ2dyZWdhdGlvbl9mdW5jdGlvbhgCIAEoDjI/LmFwcGhvc3Rpbm
cuZGF0YXN0b3JlLnY0LlByb3BlcnR5RXhwcmVzc2lvbi5BZ2dyZWdhdGlvbkZ1bmN0aW9uIiAKE0FnZ3JlZ2F0aW9uRnVuY3Rpb24SCQoFRklSU1QQASLJAQoNUHJvcGVydHlPcmRlchI8Cghwcm9wZXJ0eRgBIAIoCzIqLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5UmVmZXJlbmNlEk4KCWRpcmVjdGlvbhgCIAEoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlByb3BlcnR5T3JkZXIuRGlyZWN0aW9uOglBU0NFTkRJTkciKgoJRGlyZWN0aW9uEg0KCUFTQ0VORElORxABEg4KCkRFU0NFTkRJTkcQAiKOAQoGRmlsdGVyEkIKEGNvbXBvc2l0ZV9maWx0ZXIYASABKAsyKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21wb3NpdGVGaWx0ZXISQAoPcHJvcGVydHlfZmlsdGVyGAIgASgLMicuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlGaWx0ZXIinAEKD0NvbXBvc2l0ZUZpbHRlchJDCghvcGVyYXRvchgBIAIoDjIxLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbXBvc2l0ZUZpbHRlci5PcGVyYXRvchIvCgZmaWx0ZXIYAiADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5GaWx0ZXIiEwoIT3BlcmF0b3ISBwoDQU5EEAEivgIKDlByb3BlcnR5RmlsdGVyEjwKCHByb3BlcnR5GAEgAigLMiouYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUHJvcGVydHlSZWZlcmVuY2USQgoIb3BlcmF0b3IYAiACKA4yMC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Qcm9wZXJ0eUZpbHRlci5PcGVyYXRvchItCgV2YWx1ZRgDIAIoCzIeLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlZhbHVlInsKCE9wZXJhdG9yEg0KCUxFU1NfVEhBThABEhYKEkxFU1NfVEhBTl9PUl9FUVVBTBACEhAKDEdSRUFURVJfVEhBThADEhkKFUdSRUFURVJfVEhBTl9PUl9FUVVBTBAEEgkKBUVRVUFMEAUSEAoMSEFTX0FOQ0VTVE9SEAsisAEKCEdxbFF1ZXJ5EhQKDHF1ZXJ5X3N0cmluZxgBIAIoCRIcCg1hbGxvd19saXRlcmFsGAIgASgIOgVmYWxzZRI2CghuYW1lX2FyZxgDIAMoCzIkLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5QXJnEjgKCm51bWJlcl9hcmcYBCADKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5HcWxRdWVyeUFyZyJaCgtHcWxRdWVyeUFyZxIMCgRuYW1lGAEgASgJEi0KBXZhbHVlGAIgASgLMh4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuVmFsdWUSDgoGY3Vyc29yGAMgASgMIpEDChBRdWVyeVJlc3VsdEJhdGNoEkwKEmVudGl0eV9yZXN1bHRfdHlwZRgBIAIoDjIwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdC5SZXN1bHRUeXBlEjwKDWVudGl0eV9yZXN1bHQYAiADKAsyJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQSEgoKZW5kX2N1cnNvchgEIAEoDBJPCgxtb3JlX3Jlc3VsdHMYBSACKA4yOS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5RdWVyeVJlc3VsdEJhdGNoLk1vcmVSZXN1bHRzVHlwZRIaCg9za2lwcGVkX3Jlc3VsdHMYBiABKAU6ATASGAoQc25hcHNob3RfdmVyc2lvbhgHIA
EoAyJWCg9Nb3JlUmVzdWx0c1R5cGUSEAoMTk9UX0ZJTklTSEVEEAESHAoYTU9SRV9SRVNVTFRTX0FGVEVSX0xJTUlUEAISEwoPTk9fTU9SRV9SRVNVTFRTEAMi8gEKCE11dGF0aW9uEkAKAm9wGAEgASgOMisuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTXV0YXRpb24uT3BlcmF0aW9uOgdVTktOT1dOEikKA2tleRgCIAEoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleRIvCgZlbnRpdHkYAyABKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkiSAoJT3BlcmF0aW9uEgsKB1VOS05PV04QABIKCgZJTlNFUlQQARIKCgZVUERBVEUQAhIKCgZVUFNFUlQQAxIKCgZERUxFVEUQBCJTCg5NdXRhdGlvblJlc3VsdBIpCgNrZXkYAyABKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSFgoLbmV3X3ZlcnNpb24YBCABKAM6ATAipAIKEkRlcHJlY2F0ZWRNdXRhdGlvbhIvCgZ1cHNlcnQYASADKAsyHy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHkSLwoGdXBkYXRlGAIgAygLMh8uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5Ei8KBmluc2VydBgDIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRI3Cg5pbnNlcnRfYXV0b19pZBgEIAMoCzIfLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eRIsCgZkZWxldGUYBSADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSFAoFZm9yY2UYBiABKAg6BWZhbHNlIusBChhEZXByZWNhdGVkTXV0YXRpb25SZXN1bHQSFQoNaW5kZXhfdXBkYXRlcxgBIAIoBRI4ChJpbnNlcnRfYXV0b19pZF9rZXkYAiADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkSFgoOdXBzZXJ0X3ZlcnNpb24YAyADKAMSFgoOdXBkYXRlX3ZlcnNpb24YBCADKAMSFgoOaW5zZXJ0X3ZlcnNpb24YBSADKAMSHgoWaW5zZXJ0X2F1dG9faWRfdmVyc2lvbhgGIAMoAxIWCg5kZWxldGVfdmVyc2lvbhgHIAMoAyK1AQoLUmVhZE9wdGlvbnMSVwoQcmVhZF9jb25zaXN0ZW5jeRgBIAEoDjI0LmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlJlYWRPcHRpb25zLlJlYWRDb25zaXN0ZW5jeToHREVGQVVMVBITCgt0cmFuc2FjdGlvbhgCIAEoDCI4Cg9SZWFkQ29uc2lzdGVuY3kSCwoHREVGQVVMVBAAEgoKBlNUUk9ORxABEgwKCEVWRU5UVUFMEAIidgoNTG9va3VwUmVxdWVzdBI6CgxyZWFkX29wdGlvbnMYASABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SZWFkT3B0aW9ucxIpCgNrZXkYAyADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkirgEKDkxvb2t1cFJlc3BvbnNlEjQKBWZvdW5kGAEgAygLMiUuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRW50aXR5UmVzdWx0EjYKB21pc3NpbmcYAiADKAsyJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5FbnRpdHlSZXN1bHQSLgoIZGVmZXJyZWQYAyADKAsyHC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXkiqwIKD1J1blF1ZXJ5UmVxdWVzdBI6CgxyZWFkX29wdGlvbnMYASABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS
52NC5SZWFkT3B0aW9ucxI6CgxwYXJ0aXRpb25faWQYAiABKAsyJC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5QYXJ0aXRpb25JZBItCgVxdWVyeRgDIAEoCzIeLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5EjQKCWdxbF9xdWVyeRgHIAEoCzIhLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkdxbFF1ZXJ5Eh0KFW1pbl9zYWZlX3RpbWVfc2Vjb25kcxgEIAEoAxIcChRzdWdnZXN0ZWRfYmF0Y2hfc2l6ZRgFIAEoBSJiChBSdW5RdWVyeVJlc3BvbnNlEjgKBWJhdGNoGAEgAigLMikuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuUXVlcnlSZXN1bHRCYXRjaBIUCgxxdWVyeV9oYW5kbGUYAiABKAwiLAoUQ29udGludWVRdWVyeVJlcXVlc3QSFAoMcXVlcnlfaGFuZGxlGAEgAigMIlEKFUNvbnRpbnVlUXVlcnlSZXNwb25zZRI4CgViYXRjaBgBIAIoCzIpLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2giUwoXQmVnaW5UcmFuc2FjdGlvblJlcXVlc3QSGgoLY3Jvc3NfZ3JvdXAYASABKAg6BWZhbHNlEhwKDWNyb3NzX3JlcXVlc3QYAiABKAg6BWZhbHNlIi8KGEJlZ2luVHJhbnNhY3Rpb25SZXNwb25zZRITCgt0cmFuc2FjdGlvbhgBIAIoDCImCg9Sb2xsYmFja1JlcXVlc3QSEwoLdHJhbnNhY3Rpb24YASACKAwiEgoQUm9sbGJhY2tSZXNwb25zZSLAAgoNQ29tbWl0UmVxdWVzdBITCgt0cmFuc2FjdGlvbhgBIAEoDBIzCghtdXRhdGlvbhgFIAMoCzIhLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uEkgKE2RlcHJlY2F0ZWRfbXV0YXRpb24YAiABKAsyKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5EZXByZWNhdGVkTXV0YXRpb24SSAoEbW9kZRgEIAEoDjIrLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlcXVlc3QuTW9kZToNVFJBTlNBQ1RJT05BTBIfChBpZ25vcmVfcmVhZF9vbmx5GAYgASgIOgVmYWxzZSIwCgRNb2RlEhEKDVRSQU5TQUNUSU9OQUwQARIVChFOT05fVFJBTlNBQ1RJT05BTBACIsABCg5Db21taXRSZXNwb25zZRJACg9tdXRhdGlvbl9yZXN1bHQYAyADKAsyJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5NdXRhdGlvblJlc3VsdBJVChpkZXByZWNhdGVkX211dGF0aW9uX3Jlc3VsdBgBIAEoCzIxLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkRlcHJlY2F0ZWRNdXRhdGlvblJlc3VsdBIVCg1pbmRleF91cGRhdGVzGAQgASgFInMKEkFsbG9jYXRlSWRzUmVxdWVzdBIuCghhbGxvY2F0ZRgBIAMoCzIcLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LktleRItCgdyZXNlcnZlGAIgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5IkYKE0FsbG9jYXRlSWRzUmVzcG9uc2USLwoJYWxsb2NhdGVkGAEgAygLMhwuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuS2V5IlgKDFdyaXRlUmVxdWVzdBJIChNkZXByZWNhdGVkX211dGF0aW9uGAEgAigLMisuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRGVwcmVjYXRlZE11dGF0aW9uMqIHChJEYXRhc3RvcmVWNFNlcnZpY2USeQoQQmVnaW5UcmFuc2FjdGlvbh
IwLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkJlZ2luVHJhbnNhY3Rpb25SZXF1ZXN0GjEuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQmVnaW5UcmFuc2FjdGlvblJlc3BvbnNlIgASYQoIUm9sbGJhY2sSKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1JlcXVlc3QaKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Sb2xsYmFja1Jlc3BvbnNlIgASWwoGQ29tbWl0EiYuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29tbWl0UmVxdWVzdBonLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbW1pdFJlc3BvbnNlIgASYQoIUnVuUXVlcnkSKC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlcXVlc3QaKS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5SdW5RdWVyeVJlc3BvbnNlIgAScAoNQ29udGludWVRdWVyeRItLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkNvbnRpbnVlUXVlcnlSZXF1ZXN0Gi4uYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuQ29udGludWVRdWVyeVJlc3BvbnNlIgASWwoGTG9va3VwEiYuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTG9va3VwUmVxdWVzdBonLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkxvb2t1cFJlc3BvbnNlIgASagoLQWxsb2NhdGVJZHMSKy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1JlcXVlc3QaLC5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5BbGxvY2F0ZUlkc1Jlc3BvbnNlIgASWAoDR2V0EiYuYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuTG9va3VwUmVxdWVzdBonLmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lkxvb2t1cFJlc3BvbnNlIgASWQoFV3JpdGUSJS5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Xcml0ZVJlcXVlc3QaJy5hcHBob3N0aW5nLmRhdGFzdG9yZS52NC5Db21taXRSZXNwb25zZSIAQiMKH2NvbS5nb29nbGUuYXBwaG9zdGluZy5kYXRhc3RvcmUgAQ=="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -2631,6 +2631,8 @@
   more_results_ = 0
   has_skipped_results_ = 0
   skipped_results_ = 0
+  has_snapshot_version_ = 0
+  snapshot_version_ = 0
 
   def __init__(self, contents=None):
     self.entity_result_ = []
@@ -2704,6 +2706,19 @@
 
   def has_skipped_results(self): return self.has_skipped_results_
 
+  def snapshot_version(self): return self.snapshot_version_
+
+  def set_snapshot_version(self, x):
+    self.has_snapshot_version_ = 1
+    self.snapshot_version_ = x
+
+  def clear_snapshot_version(self):
+    if self.has_snapshot_version_:
+      self.has_snapshot_version_ = 0
+      self.snapshot_version_ = 0
+
+  def has_snapshot_version(self): return self.has_snapshot_version_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -2712,6 +2727,7 @@
     if (x.has_end_cursor()): self.set_end_cursor(x.end_cursor())
     if (x.has_more_results()): self.set_more_results(x.more_results())
     if (x.has_skipped_results()): self.set_skipped_results(x.skipped_results())
+    if (x.has_snapshot_version()): self.set_snapshot_version(x.snapshot_version())
 
   if _net_proto___parse__python is not None:
     def _CMergeFromString(self, s):
@@ -2753,6 +2769,8 @@
     if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
     if self.has_skipped_results_ != x.has_skipped_results_: return 0
     if self.has_skipped_results_ and self.skipped_results_ != x.skipped_results_: return 0
+    if self.has_snapshot_version_ != x.has_snapshot_version_: return 0
+    if self.has_snapshot_version_ and self.snapshot_version_ != x.snapshot_version_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -2777,6 +2795,7 @@
     if (self.has_end_cursor_): n += 1 + self.lengthString(len(self.end_cursor_))
     n += self.lengthVarInt64(self.more_results_)
     if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_)
+    if (self.has_snapshot_version_): n += 1 + self.lengthVarInt64(self.snapshot_version_)
     return n + 2
 
   def ByteSizePartial(self):
@@ -2791,6 +2810,7 @@
       n += 1
       n += self.lengthVarInt64(self.more_results_)
     if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_)
+    if (self.has_snapshot_version_): n += 1 + self.lengthVarInt64(self.snapshot_version_)
     return n
 
   def Clear(self):
@@ -2799,6 +2819,7 @@
     self.clear_end_cursor()
     self.clear_more_results()
     self.clear_skipped_results()
+    self.clear_snapshot_version()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(8)
@@ -2815,6 +2836,9 @@
     if (self.has_skipped_results_):
       out.putVarInt32(48)
       out.putVarInt32(self.skipped_results_)
+    if (self.has_snapshot_version_):
+      out.putVarInt32(56)
+      out.putVarInt64(self.snapshot_version_)
 
   def OutputPartial(self, out):
     if (self.has_entity_result_type_):
@@ -2833,6 +2857,9 @@
     if (self.has_skipped_results_):
       out.putVarInt32(48)
       out.putVarInt32(self.skipped_results_)
+    if (self.has_snapshot_version_):
+      out.putVarInt32(56)
+      out.putVarInt64(self.snapshot_version_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -2855,6 +2882,9 @@
       if tt == 48:
         self.set_skipped_results(d.getVarInt32())
         continue
+      if tt == 56:
+        self.set_snapshot_version(d.getVarInt64())
+        continue
 
 
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
@@ -2875,6 +2905,7 @@
     if self.has_end_cursor_: res+=prefix+("end_cursor: %s\n" % self.DebugFormatString(self.end_cursor_))
     if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatInt32(self.more_results_))
     if self.has_skipped_results_: res+=prefix+("skipped_results: %s\n" % self.DebugFormatInt32(self.skipped_results_))
+    if self.has_snapshot_version_: res+=prefix+("snapshot_version: %s\n" % self.DebugFormatInt64(self.snapshot_version_))
     return res
 
 
@@ -2886,6 +2917,7 @@
   kend_cursor = 4
   kmore_results = 5
   kskipped_results = 6
+  ksnapshot_version = 7
 
   _TEXT = _BuildTagLookupTable({
     0: "ErrorCode",
@@ -2894,7 +2926,8 @@
     4: "end_cursor",
     5: "more_results",
     6: "skipped_results",
-  }, 6)
+    7: "snapshot_version",
+  }, 7)
 
   _TYPES = _BuildTagLookupTable({
     0: ProtocolBuffer.Encoder.NUMERIC,
@@ -2903,14 +2936,15 @@
     4: ProtocolBuffer.Encoder.STRING,
     5: ProtocolBuffer.Encoder.NUMERIC,
     6: ProtocolBuffer.Encoder.NUMERIC,
-  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
+    7: ProtocolBuffer.Encoder.NUMERIC,
+  }, 7, ProtocolBuffer.Encoder.MAX_TYPE)
 
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.QueryResultBatch'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KKGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2gTGhJlbnRpdHlfcmVzdWx0X3R5cGUgASgAMAU4AhQTGg1lbnRpdHlfcmVzdWx0IAIoAjALOANKJGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdKMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgplbmRfY3Vyc29yIAQoAjAJOAEUExoMbW9yZV9yZXN1bHRzIAUoADAFOAJoABQTGg9za2lwcGVkX3Jlc3VsdHMgBigAMAU4AUIBMKMBqgEHZGVmYXVsdLIBATCkARRzeg9Nb3JlUmVzdWx0c1R5cGWLAZIBDE5PVF9GSU5JU0hFRJgBAYwBiwGSARhNT1JFX1JFU1VMVFNfQUZURVJfTElNSVSYAQKMAYsBkgEPTk9fTU9SRV9SRVNVTFRTmAEDjAF0wgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KKGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LlF1ZXJ5UmVzdWx0QmF0Y2gTGhJlbnRpdHlfcmVzdWx0X3R5cGUgASgAMAU4AhQTGg1lbnRpdHlfcmVzdWx0IAIoAjALOANKJGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eVJlc3VsdKMBqgEFY3R5cGWyAQZwcm90bzKkARQTGgplbmRfY3Vyc29yIAQoAjAJOAEUExoMbW9yZV9yZXN1bHRzIAUoADAFOAJoABQTGg9za2lwcGVkX3Jlc3VsdHMgBigAMAU4AUIBMKMBqgEHZGVmYXVsdLIBATCkARQTGhBzbmFwc2hvdF92ZXJzaW9uIAcoADADOAEUc3oPTW9yZVJlc3VsdHNUeXBliwGSAQxOT1RfRklOSVNIRUSYAQGMAYsBkgEYTU9SRV9SRVNVTFRTX0FGVEVSX0xJTUlUmAECjAGLAZIBD05PX01PUkVfUkVTVUxUU5gBA4wBdMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
@@ -2918,12 +2952,14 @@
 class Mutation(ProtocolBuffer.ProtocolMessage):
 
 
+  UNKNOWN      =    0
   INSERT       =    1
   UPDATE       =    2
   UPSERT       =    3
   DELETE       =    4
 
   _Operation_NAMES = {
+    0: "UNKNOWN",
     1: "INSERT",
     2: "UPDATE",
     3: "UPSERT",
@@ -3041,26 +3077,20 @@
 
   def IsInitialized(self, debug_strs=None):
     initialized = 1
-    if (not self.has_op_):
-      initialized = 0
-      if debug_strs is not None:
-        debug_strs.append('Required field: op not set.')
     if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
     if (self.has_entity_ and not self.entity_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
     n = 0
-    n += self.lengthVarInt64(self.op_)
+    if (self.has_op_): n += 1 + self.lengthVarInt64(self.op_)
     if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSize())
     if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSize())
-    return n + 1
+    return n
 
   def ByteSizePartial(self):
     n = 0
-    if (self.has_op_):
-      n += 1
-      n += self.lengthVarInt64(self.op_)
+    if (self.has_op_): n += 1 + self.lengthVarInt64(self.op_)
     if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSizePartial())
     if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSizePartial())
     return n
@@ -3071,8 +3101,9 @@
     self.clear_entity()
 
   def OutputUnchecked(self, out):
-    out.putVarInt32(8)
-    out.putVarInt32(self.op_)
+    if (self.has_op_):
+      out.putVarInt32(8)
+      out.putVarInt32(self.op_)
     if (self.has_key_):
       out.putVarInt32(18)
       out.putVarInt32(self.key_.ByteSize())
@@ -3159,7 +3190,7 @@
   _STYLE_CONTENT_TYPE = """"""
   _PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Mutation'
   _SERIALIZED_DESCRIPTOR = array.array('B')
-  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uExoCb3AgASgAMAU4AmgAFBMaA2tleSACKAIwCzgBShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoGZW50aXR5IAMoAjALOAFKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eaMBqgEFY3R5cGWyAQZwcm90bzKkARRzeglPcGVyYXRpb26LAZIBBklOU0VSVJgBAYwBiwGSAQZVUERBVEWYAQKMAYsBkgEGVVBTRVJUmAEDjAGLAZIBBkRFTEVURZgBBIwBdMIBHWFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVycm9y"))
+  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WidhcHBob3N0aW5nL2RhdGFzdG9yZS9kYXRhc3RvcmVfdjQucHJvdG8KIGFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0Lk11dGF0aW9uExoCb3AgASgAMAU4AUIBMGgAowGqAQdkZWZhdWx0sgEHVU5LTk9XTqQBFBMaA2tleSACKAIwCzgBShthcHBob3N0aW5nLmRhdGFzdG9yZS52NC5LZXmjAaoBBWN0eXBlsgEGcHJvdG8ypAEUExoGZW50aXR5IAMoAjALOAFKHmFwcGhvc3RpbmcuZGF0YXN0b3JlLnY0LkVudGl0eaMBqgEFY3R5cGWyAQZwcm90bzKkARRzeglPcGVyYXRpb26LAZIBB1VOS05PV06YAQCMAYsBkgEGSU5TRVJUmAEBjAGLAZIBBlVQREFURZgBAowBiwGSAQZVUFNFUlSYAQOMAYsBkgEGREVMRVRFmAEEjAF0wgEdYXBwaG9zdGluZy5kYXRhc3RvcmUudjQuRXJyb3I="))
   if _net_proto___parse__python is not None:
     _net_proto___parse__python.RegisterType(
         _SERIALIZED_DESCRIPTOR.tostring())
diff --git a/google/appengine/ext/analytics/static/analytics_js.js b/google/appengine/ext/analytics/static/analytics_js.js
index ba68776..4e13332 100644
--- a/google/appengine/ext/analytics/static/analytics_js.js
+++ b/google/appengine/ext/analytics/static/analytics_js.js
@@ -1,18 +1,18 @@
 /* Copyright 2008-9 Google Inc. All Rights Reserved. */ (function(){var l,m=this,n=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";
 else if("function"==b&&"undefined"==typeof a.call)return"object";return b},q=function(a){return"string"==typeof a},r=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=c.slice();b.push.apply(b,arguments);return a.apply(this,b)}},aa=Date.now||function(){return+new Date},s=function(a,b){var c=a.split("."),d=m;c[0]in d||!d.execScript||d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)c.length||void 0===b?d=d[e]?d[e]:d[e]={}:d[e]=b},t=function(a,b){function c(){}
 c.prototype=b.prototype;a.o=b.prototype;a.prototype=new c;a.r=function(a,c,f){return b.prototype[c].apply(a,Array.prototype.slice.call(arguments,2))}};var u=function(a){if(Error.captureStackTrace)Error.captureStackTrace(this,u);else{var b=Error().stack;b&&(this.stack=b)}a&&(this.message=String(a))};t(u,Error);var ba=function(a,b){for(var c=a.split("%s"),d="",e=Array.prototype.slice.call(arguments,1);e.length&&1<c.length;)d+=c.shift()+e.shift();return d+c.join("%s")},v=function(a){a=String(a);var b=a.indexOf(".");-1==b&&(b=a.length);b=Math.max(0,2-b);return Array(b+1).join("0")+a},x=function(a,b){return a<b?-1:a>b?1:0};var y=function(a,b){b.unshift(a);u.call(this,ba.apply(null,b));b.shift()};t(y,u);var A=function(a,b,c){if(!a){var d="Assertion failed";if(b)var d=d+(": "+b),e=Array.prototype.slice.call(arguments,2);throw new y(""+d,e||[]);}};var B=Array.prototype,C=B.indexOf?function(a,b,c){A(null!=a.length);return B.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(q(a))return q(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},ca=B.forEach?function(a,b,c){A(null!=a.length);B.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=q(a)?a.split(""):a,f=0;f<d;f++)f in e&&b.call(c,e[f],f,a)},da=B.filter?function(a,b,c){A(null!=a.length);return B.filter.call(a,b,
-c)}:function(a,b,c){for(var d=a.length,e=[],f=0,g=q(a)?a.split(""):a,h=0;h<d;h++)if(h in g){var w=g[h];b.call(c,w,h,a)&&(e[f++]=w)}return e},D=function(a){var b=a.length;if(0<b){for(var c=Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},E=function(a,b,c){A(null!=a.length);return 2>=arguments.length?B.slice.call(a,b):B.slice.call(a,b,c)};var F;t:{var G=m.navigator;if(G){var H=G.userAgent;if(H){F=H;break t}}F=""};var I=-1!=F.indexOf("Opera")||-1!=F.indexOf("OPR"),J=-1!=F.indexOf("Trident")||-1!=F.indexOf("MSIE"),K=-1!=F.indexOf("Gecko")&&-1==F.toLowerCase().indexOf("webkit")&&!(-1!=F.indexOf("Trident")||-1!=F.indexOf("MSIE")),L=-1!=F.toLowerCase().indexOf("webkit"),M=function(){var a=m.document;return a?a.documentMode:void 0},N=function(){var a="",b;if(I&&m.opera)return a=m.opera.version,"function"==n(a)?a():a;K?b=/rv\:([^\);]+)(\)|;)/:J?b=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:L&&(b=/WebKit\/(\S+)/);b&&(a=(a=
-b.exec(F))?a[1]:"");return J&&(b=M(),b>parseFloat(a))?String(b):a}(),O={},P=function(a){var b;if(!(b=O[a])){b=0;for(var c=String(N).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),d=String(a).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),e=Math.max(c.length,d.length),f=0;0==b&&f<e;f++){var g=c[f]||"",h=d[f]||"",w=RegExp("(\\d*)(\\D*)","g"),p=RegExp("(\\d*)(\\D*)","g");do{var k=w.exec(g)||["","",""],z=p.exec(h)||["","",""];if(0==k[0].length&&0==z[0].length)break;b=x(0==k[1].length?0:parseInt(k[1],
-10),0==z[1].length?0:parseInt(z[1],10))||x(0==k[2].length,0==z[2].length)||x(k[2],z[2])}while(0==b)}b=O[a]=0<=b}return b},Q=m.document,ea=Q&&J?M()||("CSS1Compat"==Q.compatMode?parseInt(N,10):5):void 0;!K&&!J||J&&J&&9<=ea||K&&P("1.9.1");J&&P("9");var fa=function(a){a=a.className;return q(a)&&a.match(/\S+/g)||[]},ga=function(a,b){for(var c=fa(a),d=E(arguments,1),e=c,f=0;f<d.length;f++)0<=C(e,d[f])||e.push(d[f]);c=c.join(" ");a.className=c},ia=function(a,b){var c=fa(a),d=E(arguments,1),c=ha(c,d).join(" ");a.className=c},ha=function(a,b){return da(a,function(a){return!(0<=C(b,a))})};var R=function(a,b,c){var d=document;c=c||d;var e=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(e||b))return c.querySelectorAll(e+(b?"."+b:""));if(b&&c.getElementsByClassName){a=c.getElementsByClassName(b);if(e){c={};for(var f=d=0,g;g=a[f];f++)e==g.nodeName&&(c[d++]=g);c.length=d;return c}return a}a=c.getElementsByTagName(e||"*");if(b){c={};for(f=d=0;g=a[f];f++){var e=g.className,h;if(h="function"==typeof e.split)h=0<=C(e.split(/\s+/),b);h&&(c[d++]=g)}c.length=d;return c}return a};var S=function(a){S[" "](a);return a};S[" "]=function(){};var ja=!J||J&&9<=ea,ka=J&&!P("9");!L||P("528");K&&P("1.9b")||J&&P("8")||I&&P("9.5")||L&&P("528");K&&!P("8")||J&&P("9");var T=function(a,b){this.type=a;this.currentTarget=this.target=b;this.defaultPrevented=this.j=!1};T.prototype.preventDefault=function(){this.defaultPrevented=!0};var U=function(a,b){T.call(this,a?a.type:"");this.relatedTarget=this.currentTarget=this.target=null;this.charCode=this.keyCode=this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.k=this.state=null;if(a){var c=this.type=a.type;this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(K){var e;t:{try{S(d.nodeName);e=!0;break t}catch(f){}e=!1}e||(d=null)}}else"mouseover"==
-c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=L||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=L||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||("keypress"==c?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=
+c)}:function(a,b,c){for(var d=a.length,e=[],f=0,g=q(a)?a.split(""):a,h=0;h<d;h++)if(h in g){var w=g[h];b.call(c,w,h,a)&&(e[f++]=w)}return e},ea=function(a){var b=a.length;if(0<b){for(var c=Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},D=function(a,b,c){A(null!=a.length);return 2>=arguments.length?B.slice.call(a,b):B.slice.call(a,b,c)};var E;t:{var F=m.navigator;if(F){var G=F.userAgent;if(G){E=G;break t}}E=""};var H=-1!=E.indexOf("Opera")||-1!=E.indexOf("OPR"),I=-1!=E.indexOf("Trident")||-1!=E.indexOf("MSIE"),J=-1!=E.indexOf("Gecko")&&-1==E.toLowerCase().indexOf("webkit")&&!(-1!=E.indexOf("Trident")||-1!=E.indexOf("MSIE")),K=-1!=E.toLowerCase().indexOf("webkit"),L=function(){var a=m.document;return a?a.documentMode:void 0},M=function(){var a="",b;if(H&&m.opera)return a=m.opera.version,"function"==n(a)?a():a;J?b=/rv\:([^\);]+)(\)|;)/:I?b=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:K&&(b=/WebKit\/(\S+)/);b&&(a=(a=
+b.exec(E))?a[1]:"");return I&&(b=L(),b>parseFloat(a))?String(b):a}(),N={},O=function(a){var b;if(!(b=N[a])){b=0;for(var c=String(M).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),d=String(a).replace(/^[\s\xa0]+|[\s\xa0]+$/g,"").split("."),e=Math.max(c.length,d.length),f=0;0==b&&f<e;f++){var g=c[f]||"",h=d[f]||"",w=RegExp("(\\d*)(\\D*)","g"),p=RegExp("(\\d*)(\\D*)","g");do{var k=w.exec(g)||["","",""],z=p.exec(h)||["","",""];if(0==k[0].length&&0==z[0].length)break;b=x(0==k[1].length?0:parseInt(k[1],
+10),0==z[1].length?0:parseInt(z[1],10))||x(0==k[2].length,0==z[2].length)||x(k[2],z[2])}while(0==b)}b=N[a]=0<=b}return b},P=m.document,Q=P&&I?L()||("CSS1Compat"==P.compatMode?parseInt(M,10):5):void 0;!J&&!I||I&&I&&9<=Q||J&&O("1.9.1");I&&O("9");var fa=function(a){a=a.className;return q(a)&&a.match(/\S+/g)||[]},ga=function(a,b){for(var c=fa(a),d=D(arguments,1),e=c,f=0;f<d.length;f++)0<=C(e,d[f])||e.push(d[f]);c=c.join(" ");a.className=c},ia=function(a,b){var c=fa(a),d=D(arguments,1),c=ha(c,d).join(" ");a.className=c},ha=function(a,b){return da(a,function(a){return!(0<=C(b,a))})};var R=function(a,b,c){var d=document;c=c||d;var e=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(e||b))return c.querySelectorAll(e+(b?"."+b:""));if(b&&c.getElementsByClassName){a=c.getElementsByClassName(b);if(e){c={};for(var f=d=0,g;g=a[f];f++)e==g.nodeName&&(c[d++]=g);c.length=d;return c}return a}a=c.getElementsByTagName(e||"*");if(b){c={};for(f=d=0;g=a[f];f++){var e=g.className,h;if(h="function"==typeof e.split)h=0<=C(e.split(/\s+/),b);h&&(c[d++]=g)}c.length=d;return c}return a};var S=function(a){S[" "](a);return a};S[" "]=function(){};var ja=!I||I&&9<=Q,ka=I&&!O("9");!K||O("528");J&&O("1.9b")||I&&O("8")||H&&O("9.5")||K&&O("528");J&&!O("8")||I&&O("9");var T=function(a,b){this.type=a;this.currentTarget=this.target=b;this.defaultPrevented=this.j=!1};T.prototype.preventDefault=function(){this.defaultPrevented=!0};var U=function(a,b){T.call(this,a?a.type:"");this.relatedTarget=this.currentTarget=this.target=null;this.charCode=this.keyCode=this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.k=this.state=null;if(a){var c=this.type=a.type;this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(J){var e;t:{try{S(d.nodeName);e=!0;break t}catch(f){}e=!1}e||(d=null)}}else"mouseover"==
+c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=K||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=K||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||("keypress"==c?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=
 a.metaKey;this.state=a.state;this.k=a;a.defaultPrevented&&this.preventDefault()}};t(U,T);U.prototype.preventDefault=function(){U.o.preventDefault.call(this);var a=this.k;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,ka)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var la="closure_listenable_"+(1E6*Math.random()|0),ma=function(a){try{return!(!a||!a[la])}catch(b){return!1}},na=0;var oa=function(a,b,c,d,e){this.c=a;this.e=null;this.src=b;this.type=c;this.g=!!d;this.f=e;this.key=++na;this.d=this.h=!1},pa=function(a){a.d=!0;a.c=null;a.e=null;a.src=null;a.f=null};var V=function(a){this.src=a;this.b={};this.i=0};V.prototype.add=function(a,b,c,d,e){var f=a.toString();a=this.b[f];a||(a=this.b[f]=[],this.i++);var g;t:{for(g=0;g<a.length;++g){var h=a[g];if(!h.d&&h.c==b&&h.g==!!d&&h.f==e)break t}g=-1}-1<g?(b=a[g],c||(b.h=!1)):(b=new oa(b,this.src,f,!!d,e),b.h=c,a.push(b));return b};var qa=function(a,b){var c=b.type;if(c in a.b){var d=a.b[c],e=C(d,b),f;if(f=0<=e)A(null!=d.length),B.splice.call(d,e,1);f&&(pa(b),0==a.b[c].length&&(delete a.b[c],a.i--))}};var W="closure_lm_"+(1E6*Math.random()|0),X={},ra=0,ta=function(){var a=sa,b=ja?function(c){return a.call(b.src,b.c,c)}:function(c){c=a.call(b.src,b.c,c);if(!c)return c};return b},ua=function(a,b,c,d,e){if("array"==n(b))for(var f=0;f<b.length;f++)ua(a,b[f],c,d,e);else if(c=va(c),ma(a))a.l.add(String(b),c,!0,d,e);else{if(!b)throw Error("Invalid event type");var f=!!d,g=Y(a);g||(a[W]=g=new V(a));c=g.add(b,c,!0,d,e);c.e||(d=ta(),c.e=d,d.src=a,d.c=c,a.addEventListener?a.addEventListener(b.toString(),
-d,f):a.attachEvent(wa(b.toString()),d),ra++)}},wa=function(a){return a in X?X[a]:X[a]="on"+a},ya=function(a,b,c,d){var e=1;if(a=Y(a))if(b=a.b[b.toString()])for(b=D(b),a=0;a<b.length;a++){var f=b[a];f&&f.g==c&&!f.d&&(e&=!1!==xa(f,d))}return Boolean(e)},xa=function(a,b){var c=a.c,d=a.f||a.src;if(a.h&&"number"!=typeof a&&a&&!a.d){var e=a.src;if(ma(e))qa(e.l,a);else{var f=a.type,g=a.e;e.removeEventListener?e.removeEventListener(f,g,a.g):e.detachEvent&&e.detachEvent(wa(f),g);ra--;(f=Y(e))?(qa(f,a),0==
-f.i&&(f.src=null,e[W]=null)):pa(a)}}return c.call(d,b)},sa=function(a,b){if(a.d)return!0;if(!ja){var c;if(!(c=b))t:{c=["window","event"];for(var d=m,e;e=c.shift();)if(null!=d[e])d=d[e];else{c=null;break t}c=d}e=c;c=new U(e,this);d=!0;if(!(0>e.keyCode||void 0!=e.returnValue)){t:{var f=!1;if(0==e.keyCode)try{e.keyCode=-1;break t}catch(g){f=!0}if(f||void 0==e.returnValue)e.returnValue=!0}e=[];for(f=c.currentTarget;f;f=f.parentNode)e.push(f);for(var f=a.type,h=e.length-1;!c.j&&0<=h;h--)c.currentTarget=
+d,f):a.attachEvent(wa(b.toString()),d),ra++)}},wa=function(a){return a in X?X[a]:X[a]="on"+a},ya=function(a,b,c,d){var e=1;if(a=Y(a))if(b=a.b[b.toString()])for(b=b.concat(),a=0;a<b.length;a++){var f=b[a];f&&f.g==c&&!f.d&&(e&=!1!==xa(f,d))}return Boolean(e)},xa=function(a,b){var c=a.c,d=a.f||a.src;if(a.h&&"number"!=typeof a&&a&&!a.d){var e=a.src;if(ma(e))qa(e.l,a);else{var f=a.type,g=a.e;e.removeEventListener?e.removeEventListener(f,g,a.g):e.detachEvent&&e.detachEvent(wa(f),g);ra--;(f=Y(e))?(qa(f,
+a),0==f.i&&(f.src=null,e[W]=null)):pa(a)}}return c.call(d,b)},sa=function(a,b){if(a.d)return!0;if(!ja){var c;if(!(c=b))t:{c=["window","event"];for(var d=m,e;e=c.shift();)if(null!=d[e])d=d[e];else{c=null;break t}c=d}e=c;c=new U(e,this);d=!0;if(!(0>e.keyCode||void 0!=e.returnValue)){t:{var f=!1;if(0==e.keyCode)try{e.keyCode=-1;break t}catch(g){f=!0}if(f||void 0==e.returnValue)e.returnValue=!0}e=[];for(f=c.currentTarget;f;f=f.parentNode)e.push(f);for(var f=a.type,h=e.length-1;!c.j&&0<=h;h--)c.currentTarget=
 e[h],d&=ya(e[h],f,!0,c);for(h=0;!c.j&&h<e.length;h++)c.currentTarget=e[h],d&=ya(e[h],f,!1,c)}return d}return xa(a,new U(b,this))},Y=function(a){a=a[W];return a instanceof V?a:null},za="__closure_events_fn_"+(1E9*Math.random()>>>0),va=function(a){A(a,"Listener can not be null.");if("function"==n(a))return a;A(a.handleEvent,"An object listener must have handleEvent method.");return a[za]||(a[za]=function(b){return a.handleEvent(b)})};var $=function(a,b,c){"number"==typeof a?(this.a=Aa(a,b||0,c||1),Z(this,c||1)):(b=typeof a,"object"==b&&null!=a||"function"==b?(this.a=Aa(a.getFullYear(),a.getMonth(),a.getDate()),Z(this,a.getDate())):(this.a=new Date(aa()),this.a.setHours(0),this.a.setMinutes(0),this.a.setSeconds(0),this.a.setMilliseconds(0)))},Aa=function(a,b,c){b=new Date(a,b,c);0<=a&&100>a&&b.setFullYear(b.getFullYear()-1900);return b};l=$.prototype;l.getFullYear=function(){return this.a.getFullYear()};l.getYear=function(){return this.getFullYear()};
 l.getMonth=function(){return this.a.getMonth()};l.getDate=function(){return this.a.getDate()};l.getTime=function(){return this.a.getTime()};l.getUTCHours=function(){return this.a.getUTCHours()};l.setFullYear=function(a){this.a.setFullYear(a)};l.setMonth=function(a){this.a.setMonth(a)};l.setDate=function(a){this.a.setDate(a)};
 l.add=function(a){if(a.p||a.n){var b=this.getMonth()+a.n+12*a.p,c=this.getYear()+Math.floor(b/12),b=b%12;0>b&&(b+=12);var d;t:{switch(b){case 1:d=0!=c%4||0==c%100&&0!=c%400?28:29;break t;case 5:case 8:case 10:case 3:d=30;break t}d=31}d=Math.min(d,this.getDate());this.setDate(1);this.setFullYear(c);this.setMonth(b);this.setDate(d)}a.m&&(b=new Date(this.getYear(),this.getMonth(),this.getDate(),12),a=new Date(b.getTime()+864E5*a.m),this.setDate(1),this.setFullYear(a.getFullYear()),this.setMonth(a.getMonth()),
-this.setDate(a.getDate()),Z(this,a.getDate()))};l.q=function(){return[this.getFullYear(),v(this.getMonth()+1),v(this.getDate())].join("")+""};l.toString=function(){return this.q()};var Z=function(a,b){if(a.getDate()!=b){var c=a.getDate()<b?1:-1;a.a.setUTCHours(a.a.getUTCHours()+c)}};$.prototype.valueOf=function(){return this.a.valueOf()};new $(0,0,1);new $(9999,11,31);J||L&&P("525");s("ae.init",function(){Ba();Ca();ua(window,"load",function(){});Da()});
-var Ba=function(){var a;a=document;if(a=q("ae-content")?a.getElementById("ae-content"):"ae-content"){a=R("table","ae-table-striped",a);for(var b=0,c;c=a[b];b++){c=R("tbody",null,c);for(var d=0,e;e=c[d];d++){e=R("tr",null,e);for(var f=0,g;g=e[f];f++)f%2&&ga(g,"ae-even")}}}},Ca=function(){var a=R(null,"ae-noscript",void 0);ca(D(a),function(a){ia(a,"ae-noscript")})},Da=function(){m._gaq=m._gaq||[];m._gaq.push(function(){m._gaq._createAsyncTracker("UA-3739047-3","ae")._trackPageview()});(function(){var a=
+this.setDate(a.getDate()),Z(this,a.getDate()))};l.q=function(){return[this.getFullYear(),v(this.getMonth()+1),v(this.getDate())].join("")+""};l.toString=function(){return this.q()};var Z=function(a,b){if(a.getDate()!=b){var c=a.getDate()<b?1:-1;a.a.setUTCHours(a.a.getUTCHours()+c)}};$.prototype.valueOf=function(){return this.a.valueOf()};new $(0,0,1);new $(9999,11,31);I||K&&O("525");s("ae.init",function(){Ba();Ca();ua(window,"load",function(){});Da()});
+var Ba=function(){var a;a=document;if(a=q("ae-content")?a.getElementById("ae-content"):"ae-content"){a=R("table","ae-table-striped",a);for(var b=0,c;c=a[b];b++){c=R("tbody",null,c);for(var d=0,e;e=c[d];d++){e=R("tr",null,e);for(var f=0,g;g=e[f];f++)f%2&&ga(g,"ae-even")}}}},Ca=function(){var a=R(null,"ae-noscript",void 0);ca(ea(a),function(a){ia(a,"ae-noscript")})},Da=function(){m._gaq=m._gaq||[];m._gaq.push(function(){m._gaq._createAsyncTracker("UA-3739047-3","ae")._trackPageview()});(function(){var a=
 document.createElement("script");a.src=("https:"==document.location.protocol?"https://ssl":"http://www")+".google-analytics.com/ga.js";a.setAttribute("async","true");document.documentElement.firstChild.appendChild(a)})()};s("ae.trackPageView",function(){m._gaq&&m._gaq._getAsyncTracker("ae")._trackPageview()});var Fa=function(a){if(void 0==a||null==a||0==a.length)return 0;a=Math.max.apply(Math,a);return Ea(a)},Ea=function(a){var b=5;2>b&&(b=2);b-=1;return Math.ceil(a/b)*b},Ga=function(a,b,c){a=a.getSelection();1==a.length&&(a=a[0],null!=a.row&&(null!=b.starttime&&(c+="&starttime="+b.starttime),null!=b.endtime&&(c+="&endtime="+b.endtime),null!=b.latency_lower&&(c+="&latency_lower="+b.latency_lower),null!=b.latency_upper&&(c+="&latency_upper="+b.latency_upper),b=c+"&detail="+a.row,window.location.href=b))},
 Ha=function(a,b,c,d,e){var f=new google.visualization.DataTable;f.addColumn("string","");f.addColumn("number","");f.addColumn({type:"string",role:"tooltip"});for(var g=0;g<b.length;g++)f.addRow(["",b[g],c[g]]);c=Math.max(10*b.length,200);b=Fa(b);a=new google.visualization.ColumnChart(document.getElementById("rpctime-"+a));a.draw(f,{height:100,width:c,legend:"none",chartArea:{left:40},fontSize:11,vAxis:{minValue:0,maxValue:b,gridlines:{count:5}}});google.visualization.events.addListener(a,"select",
 r(Ga,a,d,e))};s("ae.Charts.latencyHistogram",function(a,b,c){var d=new google.visualization.DataTable;d.addColumn("string","");d.addColumn("number","");for(var e=0;e<b.length;e++)d.addRow([""+a[e],b[e]]);for(e=b.length;e<a.length;e++)d.addRow([""+a[e],0]);b=Fa(b);(new google.visualization.ColumnChart(document.getElementById("latency-"+c))).draw(d,{legend:"none",width:20*a.length,height:200,vAxis:{maxValue:b,gridlines:{count:5}}})});
diff --git a/google/appengine/ext/appstats/static/appstats_js.js b/google/appengine/ext/appstats/static/appstats_js.js
index c21bead..5652e87 100644
--- a/google/appengine/ext/appstats/static/appstats_js.js
+++ b/google/appengine/ext/appstats/static/appstats_js.js
@@ -1,87 +1,87 @@
 /* Copyright 2008-10 Google Inc. All Rights Reserved. */ (function(){var f,l=this,aa=function(){},ba=function(a){a.ga=function(){return a.Fb?a.Fb:a.Fb=new a}},ca=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=
 typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a.call)return"object";return b},da=function(a){return"array"==ca(a)},ea=function(a){var b=ca(a);return"array"==b||"object"==b&&"number"==typeof a.length},m=function(a){return"string"==typeof a},n=function(a){return"function"==ca(a)},fa=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},ka=function(a){return a[ga]||
-(a[ga]=++ha)},ga="closure_uid_"+(1E9*Math.random()>>>0),ha=0,la=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=c.slice();b.push.apply(b,arguments);return a.apply(this,b)}},ma=function(a,b){var c=a.split("."),d=l;c[0]in d||!d.execScript||d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)c.length||void 0===b?d=d[e]?d[e]:d[e]={}:d[e]=b},p=function(a,b){function c(){}c.prototype=b.prototype;a.e=b.prototype;a.prototype=new c;a.prototype.constructor=a;a.mc=
-function(a,c,g){return b.prototype[c].apply(a,Array.prototype.slice.call(arguments,2))}};var na=function(a){if(Error.captureStackTrace)Error.captureStackTrace(this,na);else{var b=Error().stack;b&&(this.stack=b)}a&&(this.message=String(a))};p(na,Error);na.prototype.name="CustomError";var oa;var pa=function(a,b){for(var c=a.split("%s"),d="",e=Array.prototype.slice.call(arguments,1);e.length&&1<c.length;)d+=c.shift()+e.shift();return d+c.join("%s")},qa=function(a){return a.replace(/^[\s\xa0]+|[\s\xa0]+$/g,"")},xa=function(a){if(!ra.test(a))return a;-1!=a.indexOf("&")&&(a=a.replace(sa,"&amp;"));-1!=a.indexOf("<")&&(a=a.replace(ta,"&lt;"));-1!=a.indexOf(">")&&(a=a.replace(ua,"&gt;"));-1!=a.indexOf('"')&&(a=a.replace(va,"&quot;"));-1!=a.indexOf("'")&&(a=a.replace(wa,"&#39;"));return a},sa=
-/&/g,ta=/</g,ua=/>/g,va=/"/g,wa=/'/g,ra=/[&<>"']/,ya=function(a,b){return a<b?-1:a>b?1:0};var za=function(a,b){b.unshift(a);na.call(this,pa.apply(null,b));b.shift()};p(za,na);za.prototype.name="AssertionError";var Aa=function(a,b,c){var d="Assertion failed";if(b)var d=d+(": "+b),e=c;else a&&(d+=": "+a,e=null);throw new za(""+d,e||[]);},r=function(a,b,c){a||Aa("",b,Array.prototype.slice.call(arguments,2))},Ba=function(a,b,c,d){a instanceof b||Aa("instanceof check failed.",c,Array.prototype.slice.call(arguments,3))};var s=Array.prototype,Ca=s.indexOf?function(a,b,c){r(null!=a.length);return s.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(m(a))return m(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},t=s.forEach?function(a,b,c){r(null!=a.length);s.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)g in e&&b.call(c,e[g],g,a)},Da=s.filter?function(a,b,c){r(null!=a.length);return s.filter.call(a,b,
-c)}:function(a,b,c){for(var d=a.length,e=[],g=0,h=m(a)?a.split(""):a,k=0;k<d;k++)if(k in h){var q=h[k];b.call(c,q,k,a)&&(e[g++]=q)}return e},Ea=s.every?function(a,b,c){r(null!=a.length);return s.every.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)if(g in e&&!b.call(c,e[g],g,a))return!1;return!0},u=function(a,b){return 0<=Ca(a,b)},Fa=function(a,b){var c=Ca(a,b),d;if(d=0<=c)r(null!=a.length),s.splice.call(a,c,1);return d},Ga=function(a){var b=a.length;if(0<b){for(var c=
-Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},Ia=function(a,b,c,d){r(null!=a.length);s.splice.apply(a,Ha(arguments,1))},Ha=function(a,b,c){r(null!=a.length);return 2>=arguments.length?s.slice.call(a,b):s.slice.call(a,b,c)};var Ja=function(a){a=a.className;return m(a)&&a.match(/\S+/g)||[]},Ka=function(a,b){for(var c=Ja(a),d=Ha(arguments,1),e=c.length+d.length,g=c,h=0;h<d.length;h++)u(g,d[h])||g.push(d[h]);a.className=c.join(" ");return c.length==e},Ma=function(a,b){var c=Ja(a),d=Ha(arguments,1),c=La(c,d);a.className=c.join(" ")},La=function(a,b){return Da(a,function(a){return!u(b,a)})};var Na=function(a,b){for(var c in a)b.call(void 0,a[c],c,a)},Oa=function(a,b){for(var c in a)if(a[c]==b)return!0;return!1},Pa=function(a,b,c){if(b in a)throw Error('The object already contains the key "'+b+'"');a[b]=c},Qa=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Ra="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Sa=function(a,b){for(var c,d,e=1;e<arguments.length;e++){d=arguments[e];for(c in d)a[c]=d[c];for(var g=0;g<Ra.length;g++)c=
-Ra[g],Object.prototype.hasOwnProperty.call(d,c)&&(a[c]=d[c])}};var v;t:{var Ta=l.navigator;if(Ta){var Ua=Ta.userAgent;if(Ua){v=Ua;break t}}v=""}var w=function(a){return-1!=v.indexOf(a)};var Va=w("Opera")||w("OPR"),x=w("Trident")||w("MSIE"),y=w("Gecko")&&-1==v.toLowerCase().indexOf("webkit")&&!(w("Trident")||w("MSIE")),z=-1!=v.toLowerCase().indexOf("webkit"),Wa=l.navigator||null,A=-1!=(Wa&&Wa.platform||"").indexOf("Mac"),Xa=function(){var a=l.document;return a?a.documentMode:void 0},Ya=function(){var a="",b;if(Va&&l.opera)return a=l.opera.version,n(a)?a():a;y?b=/rv\:([^\);]+)(\)|;)/:x?b=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:z&&(b=/WebKit\/(\S+)/);b&&(a=(a=b.exec(v))?a[1]:"");return x&&
-(b=Xa(),b>parseFloat(a))?String(b):a}(),Za={},B=function(a){var b;if(!(b=Za[a])){b=0;for(var c=qa(String(Ya)).split("."),d=qa(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g<e;g++){var h=c[g]||"",k=d[g]||"",q=RegExp("(\\d*)(\\D*)","g"),ia=RegExp("(\\d*)(\\D*)","g");do{var N=q.exec(h)||["","",""],ja=ia.exec(k)||["","",""];if(0==N[0].length&&0==ja[0].length)break;b=ya(0==N[1].length?0:parseInt(N[1],10),0==ja[1].length?0:parseInt(ja[1],10))||ya(0==N[2].length,0==ja[2].length)||ya(N[2],
-ja[2])}while(0==b)}b=Za[a]=0<=b}return b},$a=l.document,ab=$a&&x?Xa()||("CSS1Compat"==$a.compatMode?parseInt(Ya,10):5):void 0;var bb=!x||x&&9<=ab;!y&&!x||x&&x&&9<=ab||y&&B("1.9.1");var cb=x&&!B("9");var fb=function(a){return a?new db(eb(a)):oa||(oa=new db)},gb=function(a,b){return m(b)?a.getElementById(b):b},hb=function(a,b,c){var d=document;c=c||d;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var d={},e=0,g=0,h;h=c[g];g++)a==h.nodeName&&(d[e++]=h);d.length=e;return d}return c}c=c.getElementsByTagName(a||"*");if(b){d={};for(g=e=0;h=c[g];g++)a=h.className,
-"function"==typeof a.split&&u(a.split(/\s+/),b)&&(d[e++]=h);d.length=e;return d}return c},jb=function(a,b){Na(b,function(b,d){"style"==d?a.style.cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in ib?a.setAttribute(ib[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},ib={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",usemap:"useMap",
-valign:"vAlign",width:"width"},lb=function(a,b,c){return kb(document,arguments)},kb=function(a,b){var c=b[0],d=b[1];if(!bb&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',xa(d.name),'"');if(d.type){c.push(' type="',xa(d.type),'"');var e={};Sa(e,d);delete e.type;d=e}c.push(">");c=c.join("")}c=a.createElement(c);d&&(m(d)?c.className=d:da(d)?Ka.apply(null,[c].concat(d)):jb(c,d));2<b.length&&mb(a,c,b);return c},mb=function(a,b,c){function d(c){c&&b.appendChild(m(c)?a.createTextNode(c):c)}for(var e=
-2;e<c.length;e++){var g=c[e];if(!ea(g)||fa(g)&&0<g.nodeType)d(g);else{var h;t:{if(g&&"number"==typeof g.length){if(fa(g)){h="function"==typeof g.item||"string"==typeof g.item;break t}if(n(g)){h="function"==typeof g.item;break t}}h=!1}t(h?Ga(g):g,d)}}},nb=function(a,b){if(a.contains&&1==b.nodeType)return a==b||a.contains(b);if("undefined"!=typeof a.compareDocumentPosition)return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b.parentNode;return b==a},eb=function(a){r(a,"Node cannot be null or undefined.");
-return 9==a.nodeType?a:a.ownerDocument||a.document},ob=function(a,b){r(null!=a,"goog.dom.setTextContent expects a non-null value for node");if("textContent"in a)a.textContent=b;else if(3==a.nodeType)a.data=b;else if(a.firstChild&&3==a.firstChild.nodeType){for(;a.lastChild!=a.firstChild;)a.removeChild(a.lastChild);a.firstChild.data=b}else{for(var c;c=a.firstChild;)a.removeChild(c);c=eb(a);a.appendChild(c.createTextNode(String(b)))}},pb={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},qb={IMG:" ",BR:"\n"},
-rb=function(a){a=a.getAttributeNode("tabindex");return null!=a&&a.specified},sb=function(a){a=a.tabIndex;return"number"==typeof a&&0<=a&&32768>a},tb=function(a,b,c){if(!(a.nodeName in pb))if(3==a.nodeType)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in qb)b.push(qb[a.nodeName]);else for(a=a.firstChild;a;)tb(a,b,c),a=a.nextSibling},db=function(a){this.Q=a||l.document||document};f=db.prototype;f.lb=fb;f.a=function(a){return gb(this.Q,a)};
-f.o=function(a,b,c){return kb(this.Q,arguments)};f.createElement=function(a){return this.Q.createElement(a)};f.createTextNode=function(a){return this.Q.createTextNode(String(a))};f.appendChild=function(a,b){a.appendChild(b)};f.contains=nb;
-f.I=function(a){var b;(b="A"==a.tagName||"INPUT"==a.tagName||"TEXTAREA"==a.tagName||"SELECT"==a.tagName||"BUTTON"==a.tagName?!a.disabled&&(!rb(a)||sb(a)):rb(a)&&sb(a))&&x?(a=n(a.getBoundingClientRect)?a.getBoundingClientRect():{height:a.offsetHeight,width:a.offsetWidth},a=null!=a&&0<a.height&&0<a.width):a=b;return a};var ub=function(a){ub[" "](a);return a};ub[" "]=aa;var vb=!x||x&&9<=ab,wb=!x||x&&9<=ab,xb=x&&!B("9");!z||B("528");y&&B("1.9b")||x&&B("8")||Va&&B("9.5")||z&&B("528");y&&!B("8")||x&&B("9");var yb=function(){};yb.prototype.Ub=!1;var C=function(a,b){this.type=a;this.currentTarget=this.target=b;this.defaultPrevented=this.$=!1;this.wb=!0};C.prototype.stopPropagation=function(){this.$=!0};C.prototype.preventDefault=function(){this.defaultPrevented=!0;this.wb=!1};var D=function(a,b){C.call(this,a?a.type:"");this.relatedTarget=this.currentTarget=this.target=null;this.charCode=this.keyCode=this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.state=null;this.jb=!1;this.O=null;if(a){var c=this.type=a.type;this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(y){var e;t:{try{ub(d.nodeName);e=!0;break t}catch(g){}e=!1}e||(d=null)}}else"mouseover"==
+(a[ga]=++ja)},ga="closure_uid_"+(1E9*Math.random()>>>0),ja=0,la=function(a,b){var c=Array.prototype.slice.call(arguments,1);return function(){var b=c.slice();b.push.apply(b,arguments);return a.apply(this,b)}},ma=function(a,b){var c=a.split("."),d=l;c[0]in d||!d.execScript||d.execScript("var "+c[0]);for(var e;c.length&&(e=c.shift());)c.length||void 0===b?d=d[e]?d[e]:d[e]={}:d[e]=b},p=function(a,b){function c(){}c.prototype=b.prototype;a.e=b.prototype;a.prototype=new c;a.prototype.constructor=a;a.mc=
+function(a,c,g){return b.prototype[c].apply(a,Array.prototype.slice.call(arguments,2))}};var na=function(a){if(Error.captureStackTrace)Error.captureStackTrace(this,na);else{var b=Error().stack;b&&(this.stack=b)}a&&(this.message=String(a))};p(na,Error);na.prototype.name="CustomError";var oa;var pa=function(a,b){for(var c=a.split("%s"),d="",e=Array.prototype.slice.call(arguments,1);e.length&&1<c.length;)d+=c.shift()+e.shift();return d+c.join("%s")},qa=function(a){return a.replace(/^[\s\xa0]+|[\s\xa0]+$/g,"")},ya=function(a){if(!ra.test(a))return a;-1!=a.indexOf("&")&&(a=a.replace(sa,"&amp;"));-1!=a.indexOf("<")&&(a=a.replace(ta,"&lt;"));-1!=a.indexOf(">")&&(a=a.replace(ua,"&gt;"));-1!=a.indexOf('"')&&(a=a.replace(va,"&quot;"));-1!=a.indexOf("'")&&(a=a.replace(wa,"&#39;"));-1!=a.indexOf("\x00")&&
+(a=a.replace(xa,"&#0;"));return a},sa=/&/g,ta=/</g,ua=/>/g,va=/"/g,wa=/'/g,xa=/\x00/g,ra=/[\x00&<>"']/,za=function(a,b){return a<b?-1:a>b?1:0};var Aa=function(a,b){b.unshift(a);na.call(this,pa.apply(null,b));b.shift()};p(Aa,na);Aa.prototype.name="AssertionError";var Ba=function(a,b,c){var d="Assertion failed";if(b)var d=d+(": "+b),e=c;else a&&(d+=": "+a,e=null);throw new Aa(""+d,e||[]);},r=function(a,b,c){a||Ba("",b,Array.prototype.slice.call(arguments,2))},Ca=function(a,b,c,d){a instanceof b||Ba("instanceof check failed.",c,Array.prototype.slice.call(arguments,3))};var s=Array.prototype,Da=s.indexOf?function(a,b,c){r(null!=a.length);return s.indexOf.call(a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a.length+c):c;if(m(a))return m(b)&&1==b.length?a.indexOf(b,c):-1;for(;c<a.length;c++)if(c in a&&a[c]===b)return c;return-1},t=s.forEach?function(a,b,c){r(null!=a.length);s.forEach.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)g in e&&b.call(c,e[g],g,a)},Ea=s.filter?function(a,b,c){r(null!=a.length);return s.filter.call(a,b,
+c)}:function(a,b,c){for(var d=a.length,e=[],g=0,h=m(a)?a.split(""):a,k=0;k<d;k++)if(k in h){var q=h[k];b.call(c,q,k,a)&&(e[g++]=q)}return e},Fa=s.every?function(a,b,c){r(null!=a.length);return s.every.call(a,b,c)}:function(a,b,c){for(var d=a.length,e=m(a)?a.split(""):a,g=0;g<d;g++)if(g in e&&!b.call(c,e[g],g,a))return!1;return!0},u=function(a,b){return 0<=Da(a,b)},Ga=function(a,b){var c=Da(a,b),d;if(d=0<=c)r(null!=a.length),s.splice.call(a,c,1);return d},Ha=function(a){var b=a.length;if(0<b){for(var c=
+Array(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},Ja=function(a,b,c,d){r(null!=a.length);s.splice.apply(a,Ia(arguments,1))},Ia=function(a,b,c){r(null!=a.length);return 2>=arguments.length?s.slice.call(a,b):s.slice.call(a,b,c)};var Ka=function(a){a=a.className;return m(a)&&a.match(/\S+/g)||[]},La=function(a,b){for(var c=Ka(a),d=Ia(arguments,1),e=c.length+d.length,g=c,h=0;h<d.length;h++)u(g,d[h])||g.push(d[h]);a.className=c.join(" ");return c.length==e},Na=function(a,b){var c=Ka(a),d=Ia(arguments,1),c=Ma(c,d);a.className=c.join(" ")},Ma=function(a,b){return Ea(a,function(a){return!u(b,a)})};var Oa=function(a,b){for(var c in a)b.call(void 0,a[c],c,a)},Pa=function(a,b){for(var c in a)if(a[c]==b)return!0;return!1},Qa=function(a,b,c){if(b in a)throw Error('The object already contains the key "'+b+'"');a[b]=c},Ra=function(a){var b={},c;for(c in a)b[a[c]]=c;return b},Sa="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ta=function(a,b){for(var c,d,e=1;e<arguments.length;e++){d=arguments[e];for(c in d)a[c]=d[c];for(var g=0;g<Sa.length;g++)c=
+Sa[g],Object.prototype.hasOwnProperty.call(d,c)&&(a[c]=d[c])}};var v;t:{var Ua=l.navigator;if(Ua){var Va=Ua.userAgent;if(Va){v=Va;break t}}v=""}var w=function(a){return-1!=v.indexOf(a)};var Wa=w("Opera")||w("OPR"),x=w("Trident")||w("MSIE"),y=w("Gecko")&&-1==v.toLowerCase().indexOf("webkit")&&!(w("Trident")||w("MSIE")),z=-1!=v.toLowerCase().indexOf("webkit"),Xa=l.navigator||null,A=-1!=(Xa&&Xa.platform||"").indexOf("Mac"),Ya=function(){var a=l.document;return a?a.documentMode:void 0},Za=function(){var a="",b;if(Wa&&l.opera)return a=l.opera.version,n(a)?a():a;y?b=/rv\:([^\);]+)(\)|;)/:x?b=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:z&&(b=/WebKit\/(\S+)/);b&&(a=(a=b.exec(v))?a[1]:"");return x&&
+(b=Ya(),b>parseFloat(a))?String(b):a}(),$a={},B=function(a){var b;if(!(b=$a[a])){b=0;for(var c=qa(String(Za)).split("."),d=qa(String(a)).split("."),e=Math.max(c.length,d.length),g=0;0==b&&g<e;g++){var h=c[g]||"",k=d[g]||"",q=RegExp("(\\d*)(\\D*)","g"),ha=RegExp("(\\d*)(\\D*)","g");do{var N=q.exec(h)||["","",""],ia=ha.exec(k)||["","",""];if(0==N[0].length&&0==ia[0].length)break;b=za(0==N[1].length?0:parseInt(N[1],10),0==ia[1].length?0:parseInt(ia[1],10))||za(0==N[2].length,0==ia[2].length)||za(N[2],
+ia[2])}while(0==b)}b=$a[a]=0<=b}return b},ab=l.document,bb=ab&&x?Ya()||("CSS1Compat"==ab.compatMode?parseInt(Za,10):5):void 0;var cb=!x||x&&9<=bb;!y&&!x||x&&x&&9<=bb||y&&B("1.9.1");var db=x&&!B("9");var gb=function(a){return a?new eb(fb(a)):oa||(oa=new eb)},hb=function(a,b){return m(b)?a.getElementById(b):b},ib=function(a,b,c){var d=document;c=c||d;a=a&&"*"!=a?a.toUpperCase():"";if(c.querySelectorAll&&c.querySelector&&(a||b))return c.querySelectorAll(a+(b?"."+b:""));if(b&&c.getElementsByClassName){c=c.getElementsByClassName(b);if(a){for(var d={},e=0,g=0,h;h=c[g];g++)a==h.nodeName&&(d[e++]=h);d.length=e;return d}return c}c=c.getElementsByTagName(a||"*");if(b){d={};for(g=e=0;h=c[g];g++)a=h.className,
+"function"==typeof a.split&&u(a.split(/\s+/),b)&&(d[e++]=h);d.length=e;return d}return c},kb=function(a,b){Oa(b,function(b,d){"style"==d?a.style.cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in jb?a.setAttribute(jb[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},jb={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",usemap:"useMap",
+valign:"vAlign",width:"width"},mb=function(a,b,c){return lb(document,arguments)},lb=function(a,b){var c=b[0],d=b[1];if(!cb&&d&&(d.name||d.type)){c=["<",c];d.name&&c.push(' name="',ya(d.name),'"');if(d.type){c.push(' type="',ya(d.type),'"');var e={};Ta(e,d);delete e.type;d=e}c.push(">");c=c.join("")}c=a.createElement(c);d&&(m(d)?c.className=d:da(d)?La.apply(null,[c].concat(d)):kb(c,d));2<b.length&&nb(a,c,b);return c},nb=function(a,b,c){function d(c){c&&b.appendChild(m(c)?a.createTextNode(c):c)}for(var e=
+2;e<c.length;e++){var g=c[e];if(!ea(g)||fa(g)&&0<g.nodeType)d(g);else{var h;t:{if(g&&"number"==typeof g.length){if(fa(g)){h="function"==typeof g.item||"string"==typeof g.item;break t}if(n(g)){h="function"==typeof g.item;break t}}h=!1}t(h?Ha(g):g,d)}}},ob=function(a,b){if(a.contains&&1==b.nodeType)return a==b||a.contains(b);if("undefined"!=typeof a.compareDocumentPosition)return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b.parentNode;return b==a},fb=function(a){r(a,"Node cannot be null or undefined.");
+return 9==a.nodeType?a:a.ownerDocument||a.document},pb=function(a,b){r(null!=a,"goog.dom.setTextContent expects a non-null value for node");if("textContent"in a)a.textContent=b;else if(3==a.nodeType)a.data=b;else if(a.firstChild&&3==a.firstChild.nodeType){for(;a.lastChild!=a.firstChild;)a.removeChild(a.lastChild);a.firstChild.data=b}else{for(var c;c=a.firstChild;)a.removeChild(c);c=fb(a);a.appendChild(c.createTextNode(String(b)))}},qb={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},rb={IMG:" ",BR:"\n"},
+sb=function(a){a=a.getAttributeNode("tabindex");return null!=a&&a.specified},tb=function(a){a=a.tabIndex;return"number"==typeof a&&0<=a&&32768>a},ub=function(a,b,c){if(!(a.nodeName in qb))if(3==a.nodeType)c?b.push(String(a.nodeValue).replace(/(\r\n|\r|\n)/g,"")):b.push(a.nodeValue);else if(a.nodeName in rb)b.push(rb[a.nodeName]);else for(a=a.firstChild;a;)ub(a,b,c),a=a.nextSibling},eb=function(a){this.Q=a||l.document||document};f=eb.prototype;f.lb=gb;f.a=function(a){return hb(this.Q,a)};
+f.o=function(a,b,c){return lb(this.Q,arguments)};f.createElement=function(a){return this.Q.createElement(a)};f.createTextNode=function(a){return this.Q.createTextNode(String(a))};f.appendChild=function(a,b){a.appendChild(b)};f.contains=ob;
+f.I=function(a){var b;(b="A"==a.tagName||"INPUT"==a.tagName||"TEXTAREA"==a.tagName||"SELECT"==a.tagName||"BUTTON"==a.tagName?!a.disabled&&(!sb(a)||tb(a)):sb(a)&&tb(a))&&x?(a=n(a.getBoundingClientRect)?a.getBoundingClientRect():{height:a.offsetHeight,width:a.offsetWidth},a=null!=a&&0<a.height&&0<a.width):a=b;return a};var vb=function(a){vb[" "](a);return a};vb[" "]=aa;var wb=!x||x&&9<=bb,xb=!x||x&&9<=bb,yb=x&&!B("9");!z||B("528");y&&B("1.9b")||x&&B("8")||Wa&&B("9.5")||z&&B("528");y&&!B("8")||x&&B("9");var zb=function(){};zb.prototype.Ub=!1;var C=function(a,b){this.type=a;this.currentTarget=this.target=b;this.defaultPrevented=this.$=!1;this.wb=!0};C.prototype.stopPropagation=function(){this.$=!0};C.prototype.preventDefault=function(){this.defaultPrevented=!0;this.wb=!1};var D=function(a,b){C.call(this,a?a.type:"");this.relatedTarget=this.currentTarget=this.target=null;this.charCode=this.keyCode=this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.state=null;this.jb=!1;this.O=null;if(a){var c=this.type=a.type;this.target=a.target||a.srcElement;this.currentTarget=b;var d=a.relatedTarget;if(d){if(y){var e;t:{try{vb(d.nodeName);e=!0;break t}catch(g){}e=!1}e||(d=null)}}else"mouseover"==
 c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=z||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=z||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;this.keyCode=a.keyCode||0;this.charCode=a.charCode||("keypress"==c?a.keyCode:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=
-a.metaKey;this.jb=A?a.metaKey:a.ctrlKey;this.state=a.state;this.O=a;a.defaultPrevented&&this.preventDefault()}};p(D,C);var zb=[1,4,2],Ab=function(a){return vb?0==a.O.button:"click"==a.type?!0:!!(a.O.button&zb[0])};D.prototype.stopPropagation=function(){D.e.stopPropagation.call(this);this.O.stopPropagation?this.O.stopPropagation():this.O.cancelBubble=!0};
-D.prototype.preventDefault=function(){D.e.preventDefault.call(this);var a=this.O;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,xb)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var Bb="closure_listenable_"+(1E6*Math.random()|0),Cb=function(a){try{return!(!a||!a[Bb])}catch(b){return!1}},Db=0;var Eb=function(a,b,c,d,e){this.W=a;this.Da=null;this.src=b;this.type=c;this.Ea=!!d;this.Ga=e;this.key=++Db;this.fa=this.Fa=!1},Fb=function(a){a.fa=!0;a.W=null;a.Da=null;a.src=null;a.Ga=null};var E=function(a){this.src=a;this.m={};this.ua=0};E.prototype.add=function(a,b,c,d,e){var g=a.toString();a=this.m[g];a||(a=this.m[g]=[],this.ua++);var h=Gb(a,b,d,e);-1<h?(b=a[h],c||(b.Fa=!1)):(b=new Eb(b,this.src,g,!!d,e),b.Fa=c,a.push(b));return b};E.prototype.remove=function(a,b,c,d){a=a.toString();if(!(a in this.m))return!1;var e=this.m[a];b=Gb(e,b,c,d);return-1<b?(Fb(e[b]),r(null!=e.length),s.splice.call(e,b,1),0==e.length&&(delete this.m[a],this.ua--),!0):!1};
-var Hb=function(a,b){var c=b.type;if(!(c in a.m))return!1;var d=Fa(a.m[c],b);d&&(Fb(b),0==a.m[c].length&&(delete a.m[c],a.ua--));return d};E.prototype.ab=function(a){a=a&&a.toString();var b=0,c;for(c in this.m)if(!a||c==a){for(var d=this.m[c],e=0;e<d.length;e++)++b,Fb(d[e]);delete this.m[c];this.ua--}return b};E.prototype.wa=function(a,b,c,d){a=this.m[a.toString()];var e=-1;a&&(e=Gb(a,b,c,d));return-1<e?a[e]:null};
-var Gb=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(!g.fa&&g.W==b&&g.Ea==!!c&&g.Ga==d)return e}return-1};var Ib="closure_lm_"+(1E6*Math.random()|0),Jb={},Kb=0,F=function(a,b,c,d,e){if(da(b)){for(var g=0;g<b.length;g++)F(a,b[g],c,d,e);return null}c=Lb(c);if(Cb(a))a=a.c(b,c,d,e);else{if(!b)throw Error("Invalid event type");var g=!!d,h=Mb(a);h||(a[Ib]=h=new E(a));c=h.add(b,c,!1,d,e);c.Da||(d=Nb(),c.Da=d,d.src=a,d.W=c,a.addEventListener?a.addEventListener(b.toString(),d,g):a.attachEvent(Ob(b.toString()),d),Kb++);a=c}return a},Nb=function(){var a=Pb,b=wb?function(c){return a.call(b.src,b.W,c)}:function(c){c=
-a.call(b.src,b.W,c);if(!c)return c};return b},Qb=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)Qb(a,b[g],c,d,e);else c=Lb(c),Cb(a)?a.u(b,c,d,e):a&&(a=Mb(a))&&(b=a.wa(b,c,!!d,e))&&G(b)},G=function(a){if("number"==typeof a||!a||a.fa)return!1;var b=a.src;if(Cb(b))return Hb(b.Y,a);var c=a.type,d=a.Da;b.removeEventListener?b.removeEventListener(c,d,a.Ea):b.detachEvent&&b.detachEvent(Ob(c),d);Kb--;(c=Mb(b))?(Hb(c,a),0==c.ua&&(c.src=null,b[Ib]=null)):Fb(a);return!0},Ob=function(a){return a in Jb?
-Jb[a]:Jb[a]="on"+a},Sb=function(a,b,c,d){var e=1;if(a=Mb(a))if(b=a.m[b.toString()])for(b=Ga(b),a=0;a<b.length;a++){var g=b[a];g&&g.Ea==c&&!g.fa&&(e&=!1!==Rb(g,d))}return Boolean(e)},Rb=function(a,b){var c=a.W,d=a.Ga||a.src;a.Fa&&G(a);return c.call(d,b)},Pb=function(a,b){if(a.fa)return!0;if(!wb){var c;if(!(c=b))t:{c=["window","event"];for(var d=l,e;e=c.shift();)if(null!=d[e])d=d[e];else{c=null;break t}c=d}e=c;c=new D(e,this);d=!0;if(!(0>e.keyCode||void 0!=e.returnValue)){t:{var g=!1;if(0==e.keyCode)try{e.keyCode=
--1;break t}catch(h){g=!0}if(g||void 0==e.returnValue)e.returnValue=!0}e=[];for(g=c.currentTarget;g;g=g.parentNode)e.push(g);for(var g=a.type,k=e.length-1;!c.$&&0<=k;k--)c.currentTarget=e[k],d&=Sb(e[k],g,!0,c);for(k=0;!c.$&&k<e.length;k++)c.currentTarget=e[k],d&=Sb(e[k],g,!1,c)}return d}return Rb(a,new D(b,this))},Mb=function(a){a=a[Ib];return a instanceof E?a:null},Tb="__closure_events_fn_"+(1E9*Math.random()>>>0),Lb=function(a){r(a,"Listener can not be null.");if(n(a))return a;r(a.handleEvent,"An object listener must have handleEvent method.");
-return a[Tb]||(a[Tb]=function(b){return a.handleEvent(b)})};var H=function(a){this.Eb=a;this.Na={}};p(H,yb);var Ub=[];H.prototype.c=function(a,b,c,d){da(b)||(b&&(Ub[0]=b.toString()),b=Ub);for(var e=0;e<b.length;e++){var g=F(a,b[e],c||this.handleEvent,d||!1,this.Eb||this);if(!g)break;this.Na[g.key]=g}return this};H.prototype.u=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)this.u(a,b[g],c,d,e);else c=c||this.handleEvent,e=e||this.Eb||this,c=Lb(c),d=!!d,b=Cb(a)?a.wa(b,c,d,e):a?(a=Mb(a))?a.wa(b,c,d,e):null:null,b&&(G(b),delete this.Na[b.key]);return this};
-H.prototype.ab=function(){Na(this.Na,G);this.Na={}};H.prototype.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var I=function(){this.Y=new E(this);this.dc=this};p(I,yb);I.prototype[Bb]=!0;f=I.prototype;f.nb=null;f.gb=function(a){this.nb=a};f.addEventListener=function(a,b,c,d){F(this,a,b,c,d)};f.removeEventListener=function(a,b,c,d){Qb(this,a,b,c,d)};
-f.dispatchEvent=function(a){Vb(this);var b,c=this.nb;if(c){b=[];for(var d=1;c;c=c.nb)b.push(c),r(1E3>++d,"infinite loop")}c=this.dc;d=a.type||a;if(m(a))a=new C(a,c);else if(a instanceof C)a.target=a.target||c;else{var e=a;a=new C(d,c);Sa(a,e)}var e=!0,g;if(b)for(var h=b.length-1;!a.$&&0<=h;h--)g=a.currentTarget=b[h],e=Wb(g,d,!0,a)&&e;a.$||(g=a.currentTarget=c,e=Wb(g,d,!0,a)&&e,a.$||(e=Wb(g,d,!1,a)&&e));if(b)for(h=0;!a.$&&h<b.length;h++)g=a.currentTarget=b[h],e=Wb(g,d,!1,a)&&e;return e};
-f.c=function(a,b,c,d){Vb(this);return this.Y.add(String(a),b,!1,c,d)};f.u=function(a,b,c,d){return this.Y.remove(String(a),b,c,d)};var Wb=function(a,b,c,d){b=a.Y.m[String(b)];if(!b)return!0;b=Ga(b);for(var e=!0,g=0;g<b.length;++g){var h=b[g];if(h&&!h.fa&&h.Ea==c){var k=h.W,q=h.Ga||h.src;h.Fa&&Hb(a.Y,h);e=!1!==k.call(q,d)&&e}}return e&&!1!=d.wb};I.prototype.wa=function(a,b,c,d){return this.Y.wa(String(a),b,c,d)};var Vb=function(a){r(a.Y,"Event target is not initialized. Did you call the superclass (goog.events.EventTarget) constructor?")};var J=function(a,b){a.style.display=b?"":"none"},Xb=y?"MozUserSelect":z?"WebkitUserSelect":null,Yb=function(a,b,c){c=c?null:a.getElementsByTagName("*");if(Xb){if(b=b?"none":"",a.style[Xb]=b,c){a=0;for(var d;d=c[a];a++)d.style[Xb]=b}}else if(x||Va)if(b=b?"on":"",a.setAttribute("unselectable",b),c)for(a=0;d=c[a];a++)d.setAttribute("unselectable",b)};var Zb=function(){};ba(Zb);Zb.prototype.gc=0;var K=function(a){I.call(this);this.A=a||fb();this.sa=$b};p(K,I);K.prototype.fc=Zb.ga();var $b=null,ac=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close"}throw Error("Invalid component state");};f=K.prototype;f.ha=null;f.f=!1;f.d=null;f.sa=null;f.p=null;f.q=null;f.F=null;
-var bc=function(a){return a.ha||(a.ha=":"+(a.fc.gc++).toString(36))},cc=function(a,b){if(a.p&&a.p.F){var c=a.p.F,d=a.ha;d in c&&delete c[d];Pa(a.p.F,b,a)}a.ha=b};K.prototype.a=function(){return this.d};var dc=function(a){a.Ma||(a.Ma=new H(a));return a.Ma},fc=function(a,b){if(a==b)throw Error("Unable to set parent component");if(b&&a.p&&a.ha&&ec(a.p,a.ha)&&a.p!=b)throw Error("Unable to set parent component");a.p=b;K.e.gb.call(a,b)};f=K.prototype;f.getParent=function(){return this.p};
-f.gb=function(a){if(this.p&&this.p!=a)throw Error("Method not supported");K.e.gb.call(this,a)};f.lb=function(){return this.A};f.o=function(){this.d=this.A.createElement("div")};f.K=function(a){if(this.f)throw Error("Component already rendered");if(a&&this.X(a)){var b=eb(a);this.A&&this.A.Q==b||(this.A=fb(a));this.Za(a);this.D()}else throw Error("Invalid element to decorate");};f.X=function(){return!0};f.Za=function(a){this.d=a};f.D=function(){this.f=!0;gc(this,function(a){!a.f&&a.a()&&a.D()})};
-f.ba=function(){gc(this,function(a){a.f&&a.ba()});this.Ma&&this.Ma.ab();this.f=!1};f.Ca=function(a,b){this.Va(a,hc(this),b)};
-f.Va=function(a,b,c){r(!!a,"Provided element must not be null.");if(a.f&&(c||!this.f))throw Error("Component already rendered");if(0>b||b>hc(this))throw Error("Child component index out of bounds");this.F&&this.q||(this.F={},this.q=[]);if(a.getParent()==this){var d=bc(a);this.F[d]=a;Fa(this.q,a)}else Pa(this.F,bc(a),a);fc(a,this);Ia(this.q,b,0,a);if(a.f&&this.f&&a.getParent()==this)c=this.B(),c.insertBefore(a.a(),c.childNodes[b]||null);else if(c){this.d||this.o();c=L(this,b+1);b=this.B();c=c?c.d:
+a.metaKey;this.jb=A?a.metaKey:a.ctrlKey;this.state=a.state;this.O=a;a.defaultPrevented&&this.preventDefault()}};p(D,C);var Ab=[1,4,2],Bb=function(a){return wb?0==a.O.button:"click"==a.type?!0:!!(a.O.button&Ab[0])};D.prototype.stopPropagation=function(){D.e.stopPropagation.call(this);this.O.stopPropagation?this.O.stopPropagation():this.O.cancelBubble=!0};
+D.prototype.preventDefault=function(){D.e.preventDefault.call(this);var a=this.O;if(a.preventDefault)a.preventDefault();else if(a.returnValue=!1,yb)try{if(a.ctrlKey||112<=a.keyCode&&123>=a.keyCode)a.keyCode=-1}catch(b){}};var Cb="closure_listenable_"+(1E6*Math.random()|0),Db=function(a){try{return!(!a||!a[Cb])}catch(b){return!1}},Eb=0;var Fb=function(a,b,c,d,e){this.W=a;this.Da=null;this.src=b;this.type=c;this.Ea=!!d;this.Ga=e;this.key=++Eb;this.fa=this.Fa=!1},Gb=function(a){a.fa=!0;a.W=null;a.Da=null;a.src=null;a.Ga=null};var E=function(a){this.src=a;this.m={};this.ua=0};E.prototype.add=function(a,b,c,d,e){var g=a.toString();a=this.m[g];a||(a=this.m[g]=[],this.ua++);var h=Hb(a,b,d,e);-1<h?(b=a[h],c||(b.Fa=!1)):(b=new Fb(b,this.src,g,!!d,e),b.Fa=c,a.push(b));return b};E.prototype.remove=function(a,b,c,d){a=a.toString();if(!(a in this.m))return!1;var e=this.m[a];b=Hb(e,b,c,d);return-1<b?(Gb(e[b]),r(null!=e.length),s.splice.call(e,b,1),0==e.length&&(delete this.m[a],this.ua--),!0):!1};
+var Ib=function(a,b){var c=b.type;if(!(c in a.m))return!1;var d=Ga(a.m[c],b);d&&(Gb(b),0==a.m[c].length&&(delete a.m[c],a.ua--));return d};E.prototype.ab=function(a){a=a&&a.toString();var b=0,c;for(c in this.m)if(!a||c==a){for(var d=this.m[c],e=0;e<d.length;e++)++b,Gb(d[e]);delete this.m[c];this.ua--}return b};E.prototype.wa=function(a,b,c,d){a=this.m[a.toString()];var e=-1;a&&(e=Hb(a,b,c,d));return-1<e?a[e]:null};
+var Hb=function(a,b,c,d){for(var e=0;e<a.length;++e){var g=a[e];if(!g.fa&&g.W==b&&g.Ea==!!c&&g.Ga==d)return e}return-1};var Jb="closure_lm_"+(1E6*Math.random()|0),Kb={},Lb=0,F=function(a,b,c,d,e){if(da(b)){for(var g=0;g<b.length;g++)F(a,b[g],c,d,e);return null}c=Mb(c);if(Db(a))a=a.c(b,c,d,e);else{if(!b)throw Error("Invalid event type");var g=!!d,h=Nb(a);h||(a[Jb]=h=new E(a));c=h.add(b,c,!1,d,e);c.Da||(d=Ob(),c.Da=d,d.src=a,d.W=c,a.addEventListener?a.addEventListener(b.toString(),d,g):a.attachEvent(Pb(b.toString()),d),Lb++);a=c}return a},Ob=function(){var a=Qb,b=xb?function(c){return a.call(b.src,b.W,c)}:function(c){c=
+a.call(b.src,b.W,c);if(!c)return c};return b},Rb=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)Rb(a,b[g],c,d,e);else c=Mb(c),Db(a)?a.u(b,c,d,e):a&&(a=Nb(a))&&(b=a.wa(b,c,!!d,e))&&G(b)},G=function(a){if("number"==typeof a||!a||a.fa)return!1;var b=a.src;if(Db(b))return Ib(b.Y,a);var c=a.type,d=a.Da;b.removeEventListener?b.removeEventListener(c,d,a.Ea):b.detachEvent&&b.detachEvent(Pb(c),d);Lb--;(c=Nb(b))?(Ib(c,a),0==c.ua&&(c.src=null,b[Jb]=null)):Gb(a);return!0},Pb=function(a){return a in Kb?
+Kb[a]:Kb[a]="on"+a},Tb=function(a,b,c,d){var e=1;if(a=Nb(a))if(b=a.m[b.toString()])for(b=b.concat(),a=0;a<b.length;a++){var g=b[a];g&&g.Ea==c&&!g.fa&&(e&=!1!==Sb(g,d))}return Boolean(e)},Sb=function(a,b){var c=a.W,d=a.Ga||a.src;a.Fa&&G(a);return c.call(d,b)},Qb=function(a,b){if(a.fa)return!0;if(!xb){var c;if(!(c=b))t:{c=["window","event"];for(var d=l,e;e=c.shift();)if(null!=d[e])d=d[e];else{c=null;break t}c=d}e=c;c=new D(e,this);d=!0;if(!(0>e.keyCode||void 0!=e.returnValue)){t:{var g=!1;if(0==e.keyCode)try{e.keyCode=
+-1;break t}catch(h){g=!0}if(g||void 0==e.returnValue)e.returnValue=!0}e=[];for(g=c.currentTarget;g;g=g.parentNode)e.push(g);for(var g=a.type,k=e.length-1;!c.$&&0<=k;k--)c.currentTarget=e[k],d&=Tb(e[k],g,!0,c);for(k=0;!c.$&&k<e.length;k++)c.currentTarget=e[k],d&=Tb(e[k],g,!1,c)}return d}return Sb(a,new D(b,this))},Nb=function(a){a=a[Jb];return a instanceof E?a:null},Ub="__closure_events_fn_"+(1E9*Math.random()>>>0),Mb=function(a){r(a,"Listener can not be null.");if(n(a))return a;r(a.handleEvent,"An object listener must have handleEvent method.");
+return a[Ub]||(a[Ub]=function(b){return a.handleEvent(b)})};var H=function(a){this.Eb=a;this.Na={}};p(H,zb);var Vb=[];H.prototype.c=function(a,b,c,d){da(b)||(b&&(Vb[0]=b.toString()),b=Vb);for(var e=0;e<b.length;e++){var g=F(a,b[e],c||this.handleEvent,d||!1,this.Eb||this);if(!g)break;this.Na[g.key]=g}return this};H.prototype.u=function(a,b,c,d,e){if(da(b))for(var g=0;g<b.length;g++)this.u(a,b[g],c,d,e);else c=c||this.handleEvent,e=e||this.Eb||this,c=Mb(c),d=!!d,b=Db(a)?a.wa(b,c,d,e):a?(a=Nb(a))?a.wa(b,c,d,e):null:null,b&&(G(b),delete this.Na[b.key]);return this};
+H.prototype.ab=function(){Oa(this.Na,G);this.Na={}};H.prototype.handleEvent=function(){throw Error("EventHandler.handleEvent not implemented");};var I=function(){this.Y=new E(this);this.dc=this};p(I,zb);I.prototype[Cb]=!0;f=I.prototype;f.nb=null;f.gb=function(a){this.nb=a};f.addEventListener=function(a,b,c,d){F(this,a,b,c,d)};f.removeEventListener=function(a,b,c,d){Rb(this,a,b,c,d)};
+f.dispatchEvent=function(a){Wb(this);var b,c=this.nb;if(c){b=[];for(var d=1;c;c=c.nb)b.push(c),r(1E3>++d,"infinite loop")}c=this.dc;d=a.type||a;if(m(a))a=new C(a,c);else if(a instanceof C)a.target=a.target||c;else{var e=a;a=new C(d,c);Ta(a,e)}var e=!0,g;if(b)for(var h=b.length-1;!a.$&&0<=h;h--)g=a.currentTarget=b[h],e=Xb(g,d,!0,a)&&e;a.$||(g=a.currentTarget=c,e=Xb(g,d,!0,a)&&e,a.$||(e=Xb(g,d,!1,a)&&e));if(b)for(h=0;!a.$&&h<b.length;h++)g=a.currentTarget=b[h],e=Xb(g,d,!1,a)&&e;return e};
+f.c=function(a,b,c,d){Wb(this);return this.Y.add(String(a),b,!1,c,d)};f.u=function(a,b,c,d){return this.Y.remove(String(a),b,c,d)};var Xb=function(a,b,c,d){b=a.Y.m[String(b)];if(!b)return!0;b=b.concat();for(var e=!0,g=0;g<b.length;++g){var h=b[g];if(h&&!h.fa&&h.Ea==c){var k=h.W,q=h.Ga||h.src;h.Fa&&Ib(a.Y,h);e=!1!==k.call(q,d)&&e}}return e&&!1!=d.wb};I.prototype.wa=function(a,b,c,d){return this.Y.wa(String(a),b,c,d)};var Wb=function(a){r(a.Y,"Event target is not initialized. Did you call the superclass (goog.events.EventTarget) constructor?")};var J=function(a,b){a.style.display=b?"":"none"},Yb=y?"MozUserSelect":z?"WebkitUserSelect":null,Zb=function(a,b,c){c=c?null:a.getElementsByTagName("*");if(Yb){if(b=b?"none":"",a.style[Yb]=b,c){a=0;for(var d;d=c[a];a++)d.style[Yb]=b}}else if(x||Wa)if(b=b?"on":"",a.setAttribute("unselectable",b),c)for(a=0;d=c[a];a++)d.setAttribute("unselectable",b)};var $b=function(){};ba($b);$b.prototype.gc=0;var K=function(a){I.call(this);this.A=a||gb();this.sa=ac};p(K,I);K.prototype.fc=$b.ga();var ac=null,bc=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close"}throw Error("Invalid component state");};f=K.prototype;f.ha=null;f.f=!1;f.d=null;f.sa=null;f.p=null;f.q=null;f.F=null;
+var cc=function(a){return a.ha||(a.ha=":"+(a.fc.gc++).toString(36))},dc=function(a,b){if(a.p&&a.p.F){var c=a.p.F,d=a.ha;d in c&&delete c[d];Qa(a.p.F,b,a)}a.ha=b};K.prototype.a=function(){return this.d};var ec=function(a){a.Ma||(a.Ma=new H(a));return a.Ma},gc=function(a,b){if(a==b)throw Error("Unable to set parent component");if(b&&a.p&&a.ha&&fc(a.p,a.ha)&&a.p!=b)throw Error("Unable to set parent component");a.p=b;K.e.gb.call(a,b)};f=K.prototype;f.getParent=function(){return this.p};
+f.gb=function(a){if(this.p&&this.p!=a)throw Error("Method not supported");K.e.gb.call(this,a)};f.lb=function(){return this.A};f.o=function(){this.d=this.A.createElement("div")};f.K=function(a){if(this.f)throw Error("Component already rendered");if(a&&this.X(a)){var b=fb(a);this.A&&this.A.Q==b||(this.A=gb(a));this.Za(a);this.D()}else throw Error("Invalid element to decorate");};f.X=function(){return!0};f.Za=function(a){this.d=a};f.D=function(){this.f=!0;hc(this,function(a){!a.f&&a.a()&&a.D()})};
+f.ba=function(){hc(this,function(a){a.f&&a.ba()});this.Ma&&this.Ma.ab();this.f=!1};f.Ca=function(a,b){this.Va(a,ic(this),b)};
+f.Va=function(a,b,c){r(!!a,"Provided element must not be null.");if(a.f&&(c||!this.f))throw Error("Component already rendered");if(0>b||b>ic(this))throw Error("Child component index out of bounds");this.F&&this.q||(this.F={},this.q=[]);if(a.getParent()==this){var d=cc(a);this.F[d]=a;Ga(this.q,a)}else Qa(this.F,cc(a),a);gc(a,this);Ja(this.q,b,0,a);if(a.f&&this.f&&a.getParent()==this)c=this.B(),c.insertBefore(a.a(),c.childNodes[b]||null);else if(c){this.d||this.o();c=L(this,b+1);b=this.B();c=c?c.d:
 null;if(a.f)throw Error("Component already rendered");a.d||a.o();b?b.insertBefore(a.d,c||null):a.A.Q.body.appendChild(a.d);a.p&&!a.p.f||a.D()}else this.f&&!a.f&&a.d&&a.d.parentNode&&1==a.d.parentNode.nodeType&&a.D()};f.B=function(){return this.d};
-var ic=function(a){if(null==a.sa){var b=a.f?a.d:a.A.Q.body,c;t:{c=eb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(c=c.defaultView.getComputedStyle(b,null))){c=c.direction||c.getPropertyValue("direction")||"";break t}c=""}a.sa="rtl"==(c||(b.currentStyle?b.currentStyle.direction:null)||b.style&&b.style.direction)}return a.sa};K.prototype.pa=function(a){if(this.f)throw Error("Component already rendered");this.sa=a};
-var hc=function(a){return a.q?a.q.length:0},ec=function(a,b){var c;a.F&&b?(c=a.F,c=(b in c?c[b]:void 0)||null):c=null;return c},L=function(a,b){return a.q?a.q[b]||null:null},gc=function(a,b,c){a.q&&t(a.q,b,c)},jc=function(a,b){return a.q&&b?Ca(a.q,b):-1};
-K.prototype.removeChild=function(a,b){if(a){var c=m(a)?a:bc(a);a=ec(this,c);if(c&&a){var d=this.F;c in d&&delete d[c];Fa(this.q,a);b&&(a.ba(),a.d&&(c=a.d)&&c.parentNode&&c.parentNode.removeChild(c));fc(a,null)}}if(!a)throw Error("Child is not in parent component");return a};var kc,lc={nc:"activedescendant",sc:"atomic",tc:"autocomplete",vc:"busy",yc:"checked",Dc:"controls",Fc:"describedby",Ic:"disabled",Kc:"dropeffect",Lc:"expanded",Mc:"flowto",Oc:"grabbed",Sc:"haspopup",Uc:"hidden",Wc:"invalid",Xc:"label",Yc:"labelledby",Zc:"level",dd:"live",od:"multiline",pd:"multiselectable",td:"orientation",ud:"owns",vd:"posinset",xd:"pressed",Bd:"readonly",Dd:"relevant",Ed:"required",Kd:"selected",Md:"setsize",Od:"sort",ae:"valuemax",be:"valuemin",ce:"valuenow",de:"valuetext"};var mc={oc:"alert",pc:"alertdialog",qc:"application",rc:"article",uc:"banner",wc:"button",xc:"checkbox",zc:"columnheader",Ac:"combobox",Bc:"complementary",Cc:"contentinfo",Ec:"definition",Gc:"dialog",Hc:"directory",Jc:"document",Nc:"form",Pc:"grid",Qc:"gridcell",Rc:"group",Tc:"heading",Vc:"img",$c:"link",ad:"list",bd:"listbox",cd:"listitem",ed:"log",fd:"main",gd:"marquee",hd:"math",jd:"menu",kd:"menubar",ld:"menuitem",md:"menuitemcheckbox",nd:"menuitemradio",qd:"navigation",rd:"note",sd:"option",
-wd:"presentation",yd:"progressbar",zd:"radio",Ad:"radiogroup",Cd:"region",Fd:"row",Gd:"rowgroup",Hd:"rowheader",Id:"scrollbar",Jd:"search",Ld:"separator",Nd:"slider",Pd:"spinbutton",Qd:"status",Rd:"tab",Sd:"tablist",Td:"tabpanel",Ud:"textbox",Vd:"timer",Wd:"toolbar",Xd:"tooltip",Yd:"tree",Zd:"treegrid",$d:"treeitem"};var nc=function(a,b){b?(r(Oa(mc,b),"No such ARIA role "+b),a.setAttribute("role",b)):a.removeAttribute("role")},pc=function(a,b,c){ea(c)&&(c=c.join(" "));var d=oc(b);""===c||void 0==c?(kc||(kc={atomic:!1,autocomplete:"none",dropeffect:"none",haspopup:!1,live:"off",multiline:!1,multiselectable:!1,orientation:"vertical",readonly:!1,relevant:"additions text",required:!1,sort:"none",busy:!1,disabled:!1,hidden:!1,invalid:"false"}),c=kc,b in c?a.setAttribute(d,c[b]):a.removeAttribute(d)):a.setAttribute(d,
-c)},oc=function(a){r(a,"ARIA attribute cannot be empty.");r(Oa(lc,a),"No such ARIA attribute "+a);return"aria-"+a};var sc=function(a,b,c,d,e){if(!(x||z&&B("525")))return!0;if(A&&e)return qc(a);if(e&&!d)return!1;"number"==typeof b&&(b=rc(b));if(!c&&(17==b||18==b||A&&91==b))return!1;if(z&&d&&c)switch(a){case 220:case 219:case 221:case 192:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:return!1}if(x&&d&&b==a)return!1;switch(a){case 13:return!(x&&x&&9<=ab);case 27:return!z}return qc(a)},qc=function(a){if(48<=a&&57>=a||96<=a&&106>=a||65<=a&&90>=a||z&&0==a)return!0;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 59:case 189:case 187:case 61:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return!0;
-default:return!1}},rc=function(a){if(y)a=tc(a);else if(A&&z)t:switch(a){case 93:a=91;break t}return a},tc=function(a){switch(a){case 61:return 187;case 59:return 186;case 173:return 189;case 224:return 91;case 0:return 224;default:return a}};var M=function(a,b){I.call(this);a&&uc(this,a,b)};p(M,I);f=M.prototype;f.d=null;f.Ha=null;f.Ya=null;f.Ia=null;f.r=-1;f.N=-1;f.kb=!1;
-var vc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},wc={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},xc=x||z&&B("525"),yc=A&&y;
-M.prototype.Sb=function(a){z&&(17==this.r&&!a.ctrlKey||18==this.r&&!a.altKey||A&&91==this.r&&!a.metaKey)&&(this.N=this.r=-1);-1==this.r&&(a.ctrlKey&&17!=a.keyCode?this.r=17:a.altKey&&18!=a.keyCode?this.r=18:a.metaKey&&91!=a.keyCode&&(this.r=91));xc&&!sc(a.keyCode,this.r,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.N=rc(a.keyCode),yc&&(this.kb=a.altKey))};M.prototype.Tb=function(a){this.N=this.r=-1;this.kb=a.altKey};
-M.prototype.handleEvent=function(a){var b=a.O,c,d,e=b.altKey;x&&"keypress"==a.type?(c=this.N,d=13!=c&&27!=c?b.keyCode:0):z&&"keypress"==a.type?(c=this.N,d=0<=b.charCode&&63232>b.charCode&&qc(c)?b.charCode:0):Va?(c=this.N,d=qc(c)?b.keyCode:0):(c=b.keyCode||this.N,d=b.charCode||0,yc&&(e=this.kb),A&&63==d&&224==c&&(c=191));var g=c=rc(c),h=b.keyIdentifier;c?63232<=c&&c in vc?g=vc[c]:25==c&&a.shiftKey&&(g=9):h&&h in wc&&(g=wc[h]);a=g==this.r;this.r=g;b=new zc(g,d,a,b);b.altKey=e;this.dispatchEvent(b)};
-M.prototype.a=function(){return this.d};var uc=function(a,b,c){a.Ia&&a.detach();a.d=b;a.Ha=F(a.d,"keypress",a,c);a.Ya=F(a.d,"keydown",a.Sb,c,a);a.Ia=F(a.d,"keyup",a.Tb,c,a)};M.prototype.detach=function(){this.Ha&&(G(this.Ha),G(this.Ya),G(this.Ia),this.Ia=this.Ya=this.Ha=null);this.d=null;this.N=this.r=-1};var zc=function(a,b,c,d){D.call(this,d);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};p(zc,D);var O=function(a){if(a.classList)return a.classList;a=a.className;return m(a)&&a.match(/\S+/g)||[]},Ac=function(a,b){return a.classList?a.classList.contains(b):u(O(a),b)},P=function(a,b){a.classList?a.classList.add(b):Ac(a,b)||(a.className+=0<a.className.length?" "+b:b)},Bc=function(a,b){if(a.classList)t(b,function(b){P(a,b)});else{var c={};t(O(a),function(a){c[a]=!0});t(b,function(a){c[a]=!0});a.className="";for(var d in c)a.className+=0<a.className.length?" "+d:d}},Cc=function(a,b){a.classList?
-a.classList.remove(b):Ac(a,b)&&(a.className=Da(O(a),function(a){return a!=b}).join(" "))},Dc=function(a,b){a.classList?t(b,function(b){Cc(a,b)}):a.className=Da(O(a),function(a){return!u(b,a)}).join(" ")};var Fc=function(a,b){if(!a)throw Error("Invalid class name "+a);if(!n(b))throw Error("Invalid decorator function "+b);Ec[a]=b},Gc={},Ec={};var Q=function(a){this.Gb=a};ba(Q);Q.prototype.da=function(){return this.Gb};var Hc=function(a,b){a&&(a.tabIndex=b?0:-1)};f=Q.prototype;f.o=function(a){return a.lb().o("div",this.ta(a).join(" "))};f.B=function(a){return a};f.X=function(a){return"DIV"==a.tagName};f.K=function(a,b){b.id&&cc(a,b.id);var c=this.v(),d=!1,e=O(b);e&&t(e,function(b){b==c?d=!0:b&&this.bb(a,b,c)},this);d||P(b,c);Ic(a,this.B(b));return b};
-f.bb=function(a,b,c){b==c+"-disabled"?a.qa(!1):b==c+"-horizontal"?Jc(a,"horizontal"):b==c+"-vertical"&&Jc(a,"vertical")};var Ic=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;t:{var g=c;e=void 0;r(g);for(var g=O(g),h=0,k=g.length;h<k;h++)if(e=g[h],e=e in Ec?Ec[e]():null)break t;e=null}e&&(e.d=c,a.isEnabled()||e.qa(!1),a.Ca(e),e.K(c))}else c.nodeValue&&""!=qa(c.nodeValue)||b.removeChild(c);c=d}};
-Q.prototype.Oa=function(a){a=a.a();r(a,"The container DOM element cannot be null.");Yb(a,!0,y);x&&(a.hideFocus=!0);var b=this.da();b&&nc(a,b)};Q.prototype.j=function(a){return a.a()};Q.prototype.v=function(){return"goog-container"};Q.prototype.ta=function(a){var b=this.v(),c=[b,"horizontal"==a.L?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var R=function(){},Kc;ba(R);var Lc={button:"pressed",checkbox:"checked",menuitem:"selected",menuitemcheckbox:"checked",menuitemradio:"checked",radio:"checked",tab:"selected",treeitem:"selected"};f=R.prototype;f.da=function(){};f.o=function(a){var b=a.lb().o("div",this.ta(a).join(" "),a.Ba);Mc(a,b);return b};f.B=function(a){return a};f.ra=function(a,b,c){if(a=a.a?a.a():a){var d=[b];x&&!B("7")&&(d=Nc(O(a),b),d.push(b));(c?Bc:Dc)(a,d)}};f.X=function(){return!0};
-f.K=function(a,b){b.id&&cc(a,b.id);var c=this.B(b);c&&c.firstChild?Oc(a,c.firstChild.nextSibling?Ga(c.childNodes):c.firstChild):a.Ba=null;var d=0,e=this.v(),g=this.v(),h=!1,k=!1,c=!1,q=Ga(O(b));t(q,function(a){if(h||a!=e)if(k||a!=g){var b=d;this.ub||(this.Ja||Pc(this),this.ub=Qa(this.Ja));a=parseInt(this.ub[a],10);d=b|(isNaN(a)?0:a)}else k=!0;else h=!0,g==e&&(k=!0)},this);a.g=d;h||(q.push(e),g==e&&(k=!0));k||q.push(g);var ia=a.G;ia&&q.push.apply(q,ia);if(x&&!B("7")){var N=Nc(q);0<N.length&&(q.push.apply(q,
-N),c=!0)}if(!h||!k||ia||c)b.className=q.join(" ");Mc(a,b);return b};f.Oa=function(a){ic(a)&&this.pa(a.a(),!0);a.isEnabled()&&this.na(a,a.s())};var Qc=function(a,b,c){if(a=c||a.da())r(b,"The element passed as a first parameter cannot be null."),c=b.getAttribute("role")||null,a!=c&&nc(b,a)},Mc=function(a,b){r(a);r(b);a.s()||pc(b,"hidden",!a.s());a.isEnabled()||Rc(b,1,!a.isEnabled());a.l&8&&Rc(b,8,!!(a.g&8));a.l&16&&Rc(b,16,!!(a.g&16));a.l&64&&Rc(b,64,!!(a.g&64))};f=R.prototype;
-f.za=function(a,b){Yb(a,!b,!x&&!Va)};f.pa=function(a,b){this.ra(a,this.v()+"-rtl",b)};f.I=function(a){var b;return a.l&32&&(b=a.j())?rb(b)&&sb(b):!1};f.na=function(a,b){var c;if(a.l&32&&(c=a.j())){if(!b&&a.g&32){try{c.blur()}catch(d){}a.g&32&&a.la(null)}(rb(c)&&sb(c))!=b&&(b?c.tabIndex=0:(c.tabIndex=-1,c.removeAttribute("tabIndex")))}};f.ja=function(a,b){J(a,b);a&&pc(a,"hidden",!b)};f.t=function(a,b,c){var d=a.a();if(d){var e=Sc(this,b);e&&this.ra(a,e,c);Rc(d,b,c)}};
-var Rc=function(a,b,c){Kc||(Kc={1:"disabled",8:"selected",16:"checked",64:"expanded"});r(a,"The element passed as a first parameter cannot be null.");b=Kc[b];var d=a.getAttribute("role")||null;d&&(d=Lc[d]||b,b="checked"==b||"selected"==b?d:b);b&&pc(a,b,c)};R.prototype.j=function(a){return a.a()};R.prototype.v=function(){return"goog-control"};
-R.prototype.ta=function(a){var b=this.v(),c=[b],d=this.v();d!=b&&c.push(d);b=a.g;for(d=[];b;){var e=b&-b;d.push(Sc(this,e));b&=~e}c.push.apply(c,d);(a=a.G)&&c.push.apply(c,a);x&&!B("7")&&c.push.apply(c,Nc(c));return c};
-var Nc=function(a,b){var c=[];b&&(a=a.concat([b]));t([],function(d){!Ea(d,la(u,a))||b&&!u(d,b)||c.push(d.join("_"))});return c},Sc=function(a,b){a.Ja||Pc(a);return a.Ja[b]},Pc=function(a){var b=a.v(),c=b.replace(/\xa0|\s/g," ");r(-1==c.indexOf(" "),"ControlRenderer has an invalid css class: '"+b+"'");a.Ja={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+"-open"}};var S=function(a,b,c){K.call(this,c);if(!b){b=this.constructor;for(var d;b;){d=ka(b);if(d=Gc[d])break;b=b.e?b.e.constructor:null}b=d?n(d.ga)?d.ga():new d:null}this.b=b;this.Ba=void 0!==a?a:null};p(S,K);f=S.prototype;f.Ba=null;f.g=0;f.l=39;f.ec=255;f.T=0;f.n=!0;f.G=null;f.Z=!0;f.xa=!1;f.rb=null;f.pb=function(){return this.Z};f.Pa=function(a){this.f&&a!=this.Z&&Tc(this,a);this.Z=a};f.j=function(){return this.b.j(this)};f.ya=function(){return this.ea||(this.ea=new M)};f.Ab=function(){return this.b};
-f.ra=function(a,b){b?a&&(this.G?u(this.G,a)||this.G.push(a):this.G=[a],this.b.ra(this,a,!0)):a&&this.G&&Fa(this.G,a)&&(0==this.G.length&&(this.G=null),this.b.ra(this,a,!1))};f.o=function(){var a=this.b.o(this);this.d=a;Qc(this.b,a,this.rb);this.xa||this.b.za(a,!1);this.s()||this.b.ja(a,!1)};f.B=function(){return this.b.B(this.a())};f.X=function(a){return this.b.X(a)};f.Za=function(a){this.d=a=this.b.K(this,a);Qc(this.b,a,this.rb);this.xa||this.b.za(a,!1);this.n="none"!=a.style.display};
-f.D=function(){S.e.D.call(this);this.b.Oa(this);if(this.l&-2&&(this.pb()&&Tc(this,!0),this.l&32)){var a=this.j();if(a){var b=this.ya();uc(b,a);dc(this).c(b,"key",this.J).c(a,"focus",this.ma).c(a,"blur",this.la)}}};
-var Tc=function(a,b){var c=dc(a),d=a.a();b?(c.c(d,"mouseover",a.Sa).c(d,"mousedown",a.ka).c(d,"mouseup",a.Ta).c(d,"mouseout",a.Ra),a.oa!=aa&&c.c(d,"contextmenu",a.oa),x&&c.c(d,"dblclick",a.tb)):(c.u(d,"mouseover",a.Sa).u(d,"mousedown",a.ka).u(d,"mouseup",a.Ta).u(d,"mouseout",a.Ra),a.oa!=aa&&c.u(d,"contextmenu",a.oa),x&&c.u(d,"dblclick",a.tb))};S.prototype.ba=function(){S.e.ba.call(this);this.ea&&this.ea.detach();this.s()&&this.isEnabled()&&this.b.na(this,!1)};var Oc=function(a,b){a.Ba=b};f=S.prototype;
+var jc=function(a){if(null==a.sa){var b=a.f?a.d:a.A.Q.body,c;t:{c=fb(b);if(c.defaultView&&c.defaultView.getComputedStyle&&(c=c.defaultView.getComputedStyle(b,null))){c=c.direction||c.getPropertyValue("direction")||"";break t}c=""}a.sa="rtl"==(c||(b.currentStyle?b.currentStyle.direction:null)||b.style&&b.style.direction)}return a.sa};K.prototype.pa=function(a){if(this.f)throw Error("Component already rendered");this.sa=a};
+var ic=function(a){return a.q?a.q.length:0},fc=function(a,b){var c;a.F&&b?(c=a.F,c=(b in c?c[b]:void 0)||null):c=null;return c},L=function(a,b){return a.q?a.q[b]||null:null},hc=function(a,b,c){a.q&&t(a.q,b,c)},kc=function(a,b){return a.q&&b?Da(a.q,b):-1};
+K.prototype.removeChild=function(a,b){if(a){var c=m(a)?a:cc(a);a=fc(this,c);if(c&&a){var d=this.F;c in d&&delete d[c];Ga(this.q,a);b&&(a.ba(),a.d&&(c=a.d)&&c.parentNode&&c.parentNode.removeChild(c));gc(a,null)}}if(!a)throw Error("Child is not in parent component");return a};var lc,mc={nc:"activedescendant",sc:"atomic",tc:"autocomplete",vc:"busy",yc:"checked",Dc:"controls",Fc:"describedby",Ic:"disabled",Kc:"dropeffect",Lc:"expanded",Mc:"flowto",Oc:"grabbed",Sc:"haspopup",Uc:"hidden",Wc:"invalid",Xc:"label",Yc:"labelledby",Zc:"level",dd:"live",od:"multiline",pd:"multiselectable",td:"orientation",ud:"owns",vd:"posinset",xd:"pressed",Bd:"readonly",Dd:"relevant",Ed:"required",Kd:"selected",Md:"setsize",Od:"sort",ae:"valuemax",be:"valuemin",ce:"valuenow",de:"valuetext"};var nc={oc:"alert",pc:"alertdialog",qc:"application",rc:"article",uc:"banner",wc:"button",xc:"checkbox",zc:"columnheader",Ac:"combobox",Bc:"complementary",Cc:"contentinfo",Ec:"definition",Gc:"dialog",Hc:"directory",Jc:"document",Nc:"form",Pc:"grid",Qc:"gridcell",Rc:"group",Tc:"heading",Vc:"img",$c:"link",ad:"list",bd:"listbox",cd:"listitem",ed:"log",fd:"main",gd:"marquee",hd:"math",jd:"menu",kd:"menubar",ld:"menuitem",md:"menuitemcheckbox",nd:"menuitemradio",qd:"navigation",rd:"note",sd:"option",
+wd:"presentation",yd:"progressbar",zd:"radio",Ad:"radiogroup",Cd:"region",Fd:"row",Gd:"rowgroup",Hd:"rowheader",Id:"scrollbar",Jd:"search",Ld:"separator",Nd:"slider",Pd:"spinbutton",Qd:"status",Rd:"tab",Sd:"tablist",Td:"tabpanel",Ud:"textbox",Vd:"timer",Wd:"toolbar",Xd:"tooltip",Yd:"tree",Zd:"treegrid",$d:"treeitem"};var oc=function(a,b){b?(r(Pa(nc,b),"No such ARIA role "+b),a.setAttribute("role",b)):a.removeAttribute("role")},qc=function(a,b,c){ea(c)&&(c=c.join(" "));var d=pc(b);""===c||void 0==c?(lc||(lc={atomic:!1,autocomplete:"none",dropeffect:"none",haspopup:!1,live:"off",multiline:!1,multiselectable:!1,orientation:"vertical",readonly:!1,relevant:"additions text",required:!1,sort:"none",busy:!1,disabled:!1,hidden:!1,invalid:"false"}),c=lc,b in c?a.setAttribute(d,c[b]):a.removeAttribute(d)):a.setAttribute(d,
+c)},pc=function(a){r(a,"ARIA attribute cannot be empty.");r(Pa(mc,a),"No such ARIA attribute "+a);return"aria-"+a};var tc=function(a,b,c,d,e){if(!(x||z&&B("525")))return!0;if(A&&e)return rc(a);if(e&&!d)return!1;"number"==typeof b&&(b=sc(b));if(!c&&(17==b||18==b||A&&91==b))return!1;if(z&&d&&c)switch(a){case 220:case 219:case 221:case 192:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:return!1}if(x&&d&&b==a)return!1;switch(a){case 13:return!(x&&x&&9<=bb);case 27:return!z}return rc(a)},rc=function(a){if(48<=a&&57>=a||96<=a&&106>=a||65<=a&&90>=a||z&&0==a)return!0;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 59:case 189:case 187:case 61:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return!0;
+default:return!1}},sc=function(a){if(y)a=uc(a);else if(A&&z)t:switch(a){case 93:a=91;break t}return a},uc=function(a){switch(a){case 61:return 187;case 59:return 186;case 173:return 189;case 224:return 91;case 0:return 224;default:return a}};var M=function(a,b){I.call(this);a&&vc(this,a,b)};p(M,I);f=M.prototype;f.d=null;f.Ha=null;f.Ya=null;f.Ia=null;f.r=-1;f.N=-1;f.kb=!1;
+var wc={3:13,12:144,63232:38,63233:40,63234:37,63235:39,63236:112,63237:113,63238:114,63239:115,63240:116,63241:117,63242:118,63243:119,63244:120,63245:121,63246:122,63247:123,63248:44,63272:46,63273:36,63275:35,63276:33,63277:34,63289:144,63302:45},xc={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},yc=x||z&&B("525"),zc=A&&y;
+M.prototype.Sb=function(a){z&&(17==this.r&&!a.ctrlKey||18==this.r&&!a.altKey||A&&91==this.r&&!a.metaKey)&&(this.N=this.r=-1);-1==this.r&&(a.ctrlKey&&17!=a.keyCode?this.r=17:a.altKey&&18!=a.keyCode?this.r=18:a.metaKey&&91!=a.keyCode&&(this.r=91));yc&&!tc(a.keyCode,this.r,a.shiftKey,a.ctrlKey,a.altKey)?this.handleEvent(a):(this.N=sc(a.keyCode),zc&&(this.kb=a.altKey))};M.prototype.Tb=function(a){this.N=this.r=-1;this.kb=a.altKey};
+M.prototype.handleEvent=function(a){var b=a.O,c,d,e=b.altKey;x&&"keypress"==a.type?(c=this.N,d=13!=c&&27!=c?b.keyCode:0):z&&"keypress"==a.type?(c=this.N,d=0<=b.charCode&&63232>b.charCode&&rc(c)?b.charCode:0):Wa?(c=this.N,d=rc(c)?b.keyCode:0):(c=b.keyCode||this.N,d=b.charCode||0,zc&&(e=this.kb),A&&63==d&&224==c&&(c=191));var g=c=sc(c),h=b.keyIdentifier;c?63232<=c&&c in wc?g=wc[c]:25==c&&a.shiftKey&&(g=9):h&&h in xc&&(g=xc[h]);a=g==this.r;this.r=g;b=new Ac(g,d,a,b);b.altKey=e;this.dispatchEvent(b)};
+M.prototype.a=function(){return this.d};var vc=function(a,b,c){a.Ia&&a.detach();a.d=b;a.Ha=F(a.d,"keypress",a,c);a.Ya=F(a.d,"keydown",a.Sb,c,a);a.Ia=F(a.d,"keyup",a.Tb,c,a)};M.prototype.detach=function(){this.Ha&&(G(this.Ha),G(this.Ya),G(this.Ia),this.Ia=this.Ya=this.Ha=null);this.d=null;this.N=this.r=-1};var Ac=function(a,b,c,d){D.call(this,d);this.type="key";this.keyCode=a;this.charCode=b;this.repeat=c};p(Ac,D);var O=function(a){if(a.classList)return a.classList;a=a.className;return m(a)&&a.match(/\S+/g)||[]},Bc=function(a,b){return a.classList?a.classList.contains(b):u(O(a),b)},P=function(a,b){a.classList?a.classList.add(b):Bc(a,b)||(a.className+=0<a.className.length?" "+b:b)},Cc=function(a,b){if(a.classList)t(b,function(b){P(a,b)});else{var c={};t(O(a),function(a){c[a]=!0});t(b,function(a){c[a]=!0});a.className="";for(var d in c)a.className+=0<a.className.length?" "+d:d}},Dc=function(a,b){a.classList?
+a.classList.remove(b):Bc(a,b)&&(a.className=Ea(O(a),function(a){return a!=b}).join(" "))},Ec=function(a,b){a.classList?t(b,function(b){Dc(a,b)}):a.className=Ea(O(a),function(a){return!u(b,a)}).join(" ")};var Gc=function(a,b){if(!a)throw Error("Invalid class name "+a);if(!n(b))throw Error("Invalid decorator function "+b);Fc[a]=b},Hc={},Fc={};var Q=function(a){this.Gb=a};ba(Q);Q.prototype.da=function(){return this.Gb};var Ic=function(a,b){a&&(a.tabIndex=b?0:-1)};f=Q.prototype;f.o=function(a){return a.lb().o("div",this.ta(a).join(" "))};f.B=function(a){return a};f.X=function(a){return"DIV"==a.tagName};f.K=function(a,b){b.id&&dc(a,b.id);var c=this.v(),d=!1,e=O(b);e&&t(e,function(b){b==c?d=!0:b&&this.bb(a,b,c)},this);d||P(b,c);Jc(a,this.B(b));return b};
+f.bb=function(a,b,c){b==c+"-disabled"?a.qa(!1):b==c+"-horizontal"?Kc(a,"horizontal"):b==c+"-vertical"&&Kc(a,"vertical")};var Jc=function(a,b){if(b)for(var c=b.firstChild,d;c&&c.parentNode==b;){d=c.nextSibling;if(1==c.nodeType){var e;t:{var g=c;e=void 0;r(g);for(var g=O(g),h=0,k=g.length;h<k;h++)if(e=g[h],e=e in Fc?Fc[e]():null)break t;e=null}e&&(e.d=c,a.isEnabled()||e.qa(!1),a.Ca(e),e.K(c))}else c.nodeValue&&""!=qa(c.nodeValue)||b.removeChild(c);c=d}};
+Q.prototype.Oa=function(a){a=a.a();r(a,"The container DOM element cannot be null.");Zb(a,!0,y);x&&(a.hideFocus=!0);var b=this.da();b&&oc(a,b)};Q.prototype.j=function(a){return a.a()};Q.prototype.v=function(){return"goog-container"};Q.prototype.ta=function(a){var b=this.v(),c=[b,"horizontal"==a.L?b+"-horizontal":b+"-vertical"];a.isEnabled()||c.push(b+"-disabled");return c};var R=function(){},Lc;ba(R);var Mc={button:"pressed",checkbox:"checked",menuitem:"selected",menuitemcheckbox:"checked",menuitemradio:"checked",radio:"checked",tab:"selected",treeitem:"selected"};f=R.prototype;f.da=function(){};f.o=function(a){var b=a.lb().o("div",this.ta(a).join(" "),a.Ba);Nc(a,b);return b};f.B=function(a){return a};f.ra=function(a,b,c){if(a=a.a?a.a():a){var d=[b];x&&!B("7")&&(d=Oc(O(a),b),d.push(b));(c?Cc:Ec)(a,d)}};f.X=function(){return!0};
+f.K=function(a,b){b.id&&dc(a,b.id);var c=this.B(b);c&&c.firstChild?Pc(a,c.firstChild.nextSibling?Ha(c.childNodes):c.firstChild):a.Ba=null;var d=0,e=this.v(),g=this.v(),h=!1,k=!1,c=!1,q=Ha(O(b));t(q,function(a){if(h||a!=e)if(k||a!=g){var b=d;this.ub||(this.Ja||Qc(this),this.ub=Ra(this.Ja));a=parseInt(this.ub[a],10);d=b|(isNaN(a)?0:a)}else k=!0;else h=!0,g==e&&(k=!0)},this);a.g=d;h||(q.push(e),g==e&&(k=!0));k||q.push(g);var ha=a.G;ha&&q.push.apply(q,ha);if(x&&!B("7")){var N=Oc(q);0<N.length&&(q.push.apply(q,
+N),c=!0)}if(!h||!k||ha||c)b.className=q.join(" ");Nc(a,b);return b};f.Oa=function(a){jc(a)&&this.pa(a.a(),!0);a.isEnabled()&&this.na(a,a.s())};var Rc=function(a,b,c){if(a=c||a.da())r(b,"The element passed as a first parameter cannot be null."),c=b.getAttribute("role")||null,a!=c&&oc(b,a)},Nc=function(a,b){r(a);r(b);a.s()||qc(b,"hidden",!a.s());a.isEnabled()||Sc(b,1,!a.isEnabled());a.l&8&&Sc(b,8,!!(a.g&8));a.l&16&&Sc(b,16,!!(a.g&16));a.l&64&&Sc(b,64,!!(a.g&64))};f=R.prototype;
+f.za=function(a,b){Zb(a,!b,!x&&!Wa)};f.pa=function(a,b){this.ra(a,this.v()+"-rtl",b)};f.I=function(a){var b;return a.l&32&&(b=a.j())?sb(b)&&tb(b):!1};f.na=function(a,b){var c;if(a.l&32&&(c=a.j())){if(!b&&a.g&32){try{c.blur()}catch(d){}a.g&32&&a.la(null)}(sb(c)&&tb(c))!=b&&(b?c.tabIndex=0:(c.tabIndex=-1,c.removeAttribute("tabIndex")))}};f.ja=function(a,b){J(a,b);a&&qc(a,"hidden",!b)};f.t=function(a,b,c){var d=a.a();if(d){var e=Tc(this,b);e&&this.ra(a,e,c);Sc(d,b,c)}};
+var Sc=function(a,b,c){Lc||(Lc={1:"disabled",8:"selected",16:"checked",64:"expanded"});r(a,"The element passed as a first parameter cannot be null.");b=Lc[b];var d=a.getAttribute("role")||null;d&&(d=Mc[d]||b,b="checked"==b||"selected"==b?d:b);b&&qc(a,b,c)};R.prototype.j=function(a){return a.a()};R.prototype.v=function(){return"goog-control"};
+R.prototype.ta=function(a){var b=this.v(),c=[b],d=this.v();d!=b&&c.push(d);b=a.g;for(d=[];b;){var e=b&-b;d.push(Tc(this,e));b&=~e}c.push.apply(c,d);(a=a.G)&&c.push.apply(c,a);x&&!B("7")&&c.push.apply(c,Oc(c));return c};
+var Oc=function(a,b){var c=[];b&&(a=a.concat([b]));t([],function(d){!Fa(d,la(u,a))||b&&!u(d,b)||c.push(d.join("_"))});return c},Tc=function(a,b){a.Ja||Qc(a);return a.Ja[b]},Qc=function(a){var b=a.v(),c=b.replace(/\xa0|\s/g," ");r(-1==c.indexOf(" "),"ControlRenderer has an invalid css class: '"+b+"'");a.Ja={1:b+"-disabled",2:b+"-hover",4:b+"-active",8:b+"-selected",16:b+"-checked",32:b+"-focused",64:b+"-open"}};var S=function(a,b,c){K.call(this,c);if(!b){b=this.constructor;for(var d;b;){d=ka(b);if(d=Hc[d])break;b=b.e?b.e.constructor:null}b=d?n(d.ga)?d.ga():new d:null}this.b=b;this.Ba=void 0!==a?a:null};p(S,K);f=S.prototype;f.Ba=null;f.g=0;f.l=39;f.ec=255;f.T=0;f.n=!0;f.G=null;f.Z=!0;f.xa=!1;f.rb=null;f.pb=function(){return this.Z};f.Pa=function(a){this.f&&a!=this.Z&&Uc(this,a);this.Z=a};f.j=function(){return this.b.j(this)};f.ya=function(){return this.ea||(this.ea=new M)};f.Ab=function(){return this.b};
+f.ra=function(a,b){b?a&&(this.G?u(this.G,a)||this.G.push(a):this.G=[a],this.b.ra(this,a,!0)):a&&this.G&&Ga(this.G,a)&&(0==this.G.length&&(this.G=null),this.b.ra(this,a,!1))};f.o=function(){var a=this.b.o(this);this.d=a;Rc(this.b,a,this.rb);this.xa||this.b.za(a,!1);this.s()||this.b.ja(a,!1)};f.B=function(){return this.b.B(this.a())};f.X=function(a){return this.b.X(a)};f.Za=function(a){this.d=a=this.b.K(this,a);Rc(this.b,a,this.rb);this.xa||this.b.za(a,!1);this.n="none"!=a.style.display};
+f.D=function(){S.e.D.call(this);this.b.Oa(this);if(this.l&-2&&(this.pb()&&Uc(this,!0),this.l&32)){var a=this.j();if(a){var b=this.ya();vc(b,a);ec(this).c(b,"key",this.J).c(a,"focus",this.ma).c(a,"blur",this.la)}}};
+var Uc=function(a,b){var c=ec(a),d=a.a();b?(c.c(d,"mouseover",a.Sa).c(d,"mousedown",a.ka).c(d,"mouseup",a.Ta).c(d,"mouseout",a.Ra),a.oa!=aa&&c.c(d,"contextmenu",a.oa),x&&c.c(d,"dblclick",a.tb)):(c.u(d,"mouseover",a.Sa).u(d,"mousedown",a.ka).u(d,"mouseup",a.Ta).u(d,"mouseout",a.Ra),a.oa!=aa&&c.u(d,"contextmenu",a.oa),x&&c.u(d,"dblclick",a.tb))};S.prototype.ba=function(){S.e.ba.call(this);this.ea&&this.ea.detach();this.s()&&this.isEnabled()&&this.b.na(this,!1)};var Pc=function(a,b){a.Ba=b};f=S.prototype;
 f.pa=function(a){S.e.pa.call(this,a);var b=this.a();b&&this.b.pa(b,a)};f.za=function(a){this.xa=a;var b=this.a();b&&this.b.za(b,a)};f.s=function(){return this.n};f.ja=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){var c=this.a();c&&this.b.ja(c,a);this.isEnabled()&&this.b.na(this,a);this.n=a;return!0}return!1};f.isEnabled=function(){return!(this.g&1)};
-f.qa=function(a){var b=this.getParent();b&&"function"==typeof b.isEnabled&&!b.isEnabled()||!T(this,1,!a)||(a||(this.setActive(!1),this.C(!1)),this.s()&&this.b.na(this,a),this.t(1,!a))};f.C=function(a){T(this,2,a)&&this.t(2,a)};f.setActive=function(a){T(this,4,a)&&this.t(4,a)};var Uc=function(a,b){T(a,8,b)&&a.t(8,b)},Vc=function(a,b){T(a,64,b)&&a.t(64,b)};S.prototype.t=function(a,b){this.l&a&&b!=!!(this.g&a)&&(this.b.t(this,a,b),this.g=b?this.g|a:this.g&~a)};
-var Wc=function(a,b,c){if(a.f&&a.g&b&&!c)throw Error("Component already rendered");!c&&a.g&b&&a.t(b,!1);a.l=c?a.l|b:a.l&~b},U=function(a,b){return!!(a.ec&b)&&!!(a.l&b)},T=function(a,b,c){return!!(a.l&b)&&!!(a.g&b)!=c&&(!(a.T&b)||a.dispatchEvent(ac(b,c)))&&!a.Ub};f=S.prototype;f.Sa=function(a){(!a.relatedTarget||!nb(this.a(),a.relatedTarget))&&this.dispatchEvent("enter")&&this.isEnabled()&&U(this,2)&&this.C(!0)};
-f.Ra=function(a){a.relatedTarget&&nb(this.a(),a.relatedTarget)||!this.dispatchEvent("leave")||(U(this,4)&&this.setActive(!1),U(this,2)&&this.C(!1))};f.oa=aa;f.ka=function(a){this.isEnabled()&&(U(this,2)&&this.C(!0),!Ab(a)||z&&A&&a.ctrlKey||(U(this,4)&&this.setActive(!0),this.b.I(this)&&this.j().focus()));this.xa||!Ab(a)||z&&A&&a.ctrlKey||a.preventDefault()};f.Ta=function(a){this.isEnabled()&&(U(this,2)&&this.C(!0),this.g&4&&Xc(this,a)&&U(this,4)&&this.setActive(!1))};
-f.tb=function(a){this.isEnabled()&&Xc(this,a)};var Xc=function(a,b){if(U(a,16)){var c=!(a.g&16);T(a,16,c)&&a.t(16,c)}U(a,8)&&Uc(a,!0);U(a,64)&&Vc(a,!(a.g&64));c=new C("action",a);b&&(c.altKey=b.altKey,c.ctrlKey=b.ctrlKey,c.metaKey=b.metaKey,c.shiftKey=b.shiftKey,c.jb=b.jb);return a.dispatchEvent(c)};S.prototype.ma=function(){U(this,32)&&T(this,32,!0)&&this.t(32,!0)};S.prototype.la=function(){U(this,4)&&this.setActive(!1);U(this,32)&&T(this,32,!1)&&this.t(32,!1)};
-S.prototype.J=function(a){return this.s()&&this.isEnabled()&&this.mb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};S.prototype.mb=function(a){return 13==a.keyCode&&Xc(this,a)};if(!n(S))throw Error("Invalid component class "+S);if(!n(R))throw Error("Invalid renderer class "+R);var Yc=ka(S);Gc[Yc]=R;Fc("goog-control",function(){return new S(null)});var V=function(a,b,c){K.call(this,c);this.b=b||Q.ga();this.L=a||"vertical"};p(V,K);f=V.prototype;f.vb=null;f.ea=null;f.b=null;f.L=null;f.n=!0;f.U=!0;f.$a=!0;f.h=-1;f.i=null;f.aa=!1;f.Rb=!1;f.Qb=!0;f.M=null;f.j=function(){return this.vb||this.b.j(this)};f.ya=function(){return this.ea||(this.ea=new M(this.j()))};f.Ab=function(){return this.b};f.o=function(){this.d=this.b.o(this)};f.B=function(){return this.b.B(this.a())};f.X=function(a){return this.b.X(a)};
-f.Za=function(a){this.d=this.b.K(this,a);"none"==a.style.display&&(this.n=!1)};f.D=function(){V.e.D.call(this);gc(this,function(a){a.f&&Zc(this,a)},this);var a=this.a();this.b.Oa(this);this.ja(this.n,!0);dc(this).c(this,"enter",this.Kb).c(this,"highlight",this.Lb).c(this,"unhighlight",this.Nb).c(this,"open",this.Mb).c(this,"close",this.Ib).c(a,"mousedown",this.ka).c(eb(a),"mouseup",this.Jb).c(a,["mousedown","mouseup","mouseover","mouseout","contextmenu"],this.Hb);this.I()&&$c(this,!0)};
-var $c=function(a,b){var c=dc(a),d=a.j();b?c.c(d,"focus",a.ma).c(d,"blur",a.la).c(a.ya(),"key",a.J):c.u(d,"focus",a.ma).u(d,"blur",a.la).u(a.ya(),"key",a.J)};f=V.prototype;f.ba=function(){ad(this,-1);this.i&&Vc(this.i,!1);this.aa=!1;V.e.ba.call(this)};f.Kb=function(){return!0};
-f.Lb=function(a){var b=jc(this,a.target);if(-1<b&&b!=this.h){var c=L(this,this.h);c&&c.C(!1);this.h=b;c=L(this,this.h);this.aa&&c.setActive(!0);this.Qb&&this.i&&c!=this.i&&(c.l&64?Vc(c,!0):Vc(this.i,!1))}b=this.a();r(b,"The DOM element for the container cannot be null.");null!=a.target.a()&&pc(b,"activedescendant",a.target.a().id)};f.Nb=function(a){a.target==L(this,this.h)&&(this.h=-1);a=this.a();r(a,"The DOM element for the container cannot be null.");a.removeAttribute(oc("activedescendant"))};
-f.Mb=function(a){(a=a.target)&&a!=this.i&&a.getParent()==this&&(this.i&&Vc(this.i,!1),this.i=a)};f.Ib=function(a){a.target==this.i&&(this.i=null)};f.ka=function(a){this.U&&(this.aa=!0);var b=this.j();b&&rb(b)&&sb(b)?b.focus():a.preventDefault()};f.Jb=function(){this.aa=!1};
-f.Hb=function(a){var b;t:{b=a.target;if(this.M)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.M){b=this.M[d];break t}b=b.parentNode}b=null}if(b)switch(a.type){case "mousedown":b.ka(a);break;case "mouseup":b.Ta(a);break;case "mouseover":b.Sa(a);break;case "mouseout":b.Ra(a);break;case "contextmenu":b.oa(a)}};f.ma=function(){};f.la=function(){ad(this,-1);this.aa=!1;this.i&&Vc(this.i,!1)};
-f.J=function(a){return this.isEnabled()&&this.s()&&(0!=hc(this)||this.vb)&&this.mb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};
-f.mb=function(a){var b=L(this,this.h);if(b&&"function"==typeof b.J&&b.J(a)||this.i&&this.i!=b&&"function"==typeof this.i.J&&this.i.J(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.I())this.j().blur();else return!1;break;case 36:bd(this);break;case 35:cd(this);break;case 38:if("vertical"==this.L)dd(this);else return!1;break;case 37:if("horizontal"==this.L)ic(this)?ed(this):dd(this);else return!1;break;case 40:if("vertical"==this.L)ed(this);else return!1;
-break;case 39:if("horizontal"==this.L)ic(this)?dd(this):ed(this);else return!1;break;default:return!1}return!0};var Zc=function(a,b){var c=b.a(),c=c.id||(c.id=bc(b));a.M||(a.M={});a.M[c]=b};V.prototype.Ca=function(a,b){Ba(a,S,"The child of a container must be a control");V.e.Ca.call(this,a,b)};V.prototype.Va=function(a,b,c){a.T|=2;a.T|=64;!this.I()&&this.Rb||Wc(a,32,!1);a.Pa(!1);V.e.Va.call(this,a,b,c);a.f&&this.f&&Zc(this,a);b<=this.h&&this.h++};
-V.prototype.removeChild=function(a,b){if(a=m(a)?ec(this,a):a){var c=jc(this,a);-1!=c&&(c==this.h?(a.C(!1),this.h=-1):c<this.h&&this.h--);var d=a.a();d&&d.id&&this.M&&(c=this.M,d=d.id,d in c&&delete c[d])}a=V.e.removeChild.call(this,a,b);a.Pa(!0);return a};var Jc=function(a,b){if(a.a())throw Error("Component already rendered");a.L=b};f=V.prototype;f.s=function(){return this.n};
-f.ja=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){this.n=a;var c=this.a();c&&(J(c,a),this.I()&&Hc(this.j(),this.U&&this.n),b||this.dispatchEvent(this.n?"aftershow":"afterhide"));return!0}return!1};f.isEnabled=function(){return this.U};f.qa=function(a){this.U!=a&&this.dispatchEvent(a?"enable":"disable")&&(a?(this.U=!0,gc(this,function(a){a.xb?delete a.xb:a.qa(!0)})):(gc(this,function(a){a.isEnabled()?a.qa(!1):a.xb=!0}),this.aa=this.U=!1),this.I()&&Hc(this.j(),a&&this.n))};
-f.I=function(){return this.$a};f.na=function(a){a!=this.$a&&this.f&&$c(this,a);this.$a=a;this.U&&this.n&&Hc(this.j(),a)};var ad=function(a,b){var c=L(a,b);c?c.C(!0):-1<a.h&&L(a,a.h).C(!1)};V.prototype.C=function(a){ad(this,jc(this,a))};
-var bd=function(a){fd(a,function(a,c){return(a+1)%c},hc(a)-1)},cd=function(a){fd(a,function(a,c){a--;return 0>a?c-1:a},0)},ed=function(a){fd(a,function(a,c){return(a+1)%c},a.h)},dd=function(a){fd(a,function(a,c){a--;return 0>a?c-1:a},a.h)},fd=function(a,b,c){c=0>c?jc(a,a.i):c;var d=hc(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=L(a,c);if(g&&g.s()&&g.isEnabled()&&g.l&2){a.Wa(c);break}e++;c=b.call(a,c,d)}};V.prototype.Wa=function(a){ad(this,a)};var gd=function(){};p(gd,R);ba(gd);f=gd.prototype;f.v=function(){return"goog-tab"};f.da=function(){return"tab"};f.o=function(a){var b=gd.e.o.call(this,a);(a=a.Ua())&&this.Xa(b,a);return b};f.K=function(a,b){b=gd.e.K.call(this,a,b);var c=this.Ua(b);c&&(a.sb=c);a.g&8&&(c=a.getParent())&&n(c.V)&&(a.t(8,!1),c.V(a));return b};f.Ua=function(a){return a.title||""};f.Xa=function(a,b){a&&(a.title=b||"")};var hd=function(a,b,c){S.call(this,a,b||gd.ga(),c);Wc(this,8,!0);this.T|=9};p(hd,S);hd.prototype.Ua=function(){return this.sb};hd.prototype.Xa=function(a){this.Ab().Xa(this.a(),a);this.sb=a};Fc("goog-tab",function(){return new hd(null)});var W=function(){this.Gb="tablist"};p(W,Q);ba(W);W.prototype.v=function(){return"goog-tab-bar"};W.prototype.bb=function(a,b,c){this.Bb||(this.Ka||id(this),this.Bb=Qa(this.Ka));var d=this.Bb[b];d?(Jc(a,jd(d)),a.yb=d):W.e.bb.call(this,a,b,c)};W.prototype.ta=function(a){var b=W.e.ta.call(this,a);this.Ka||id(this);b.push(this.Ka[a.yb]);return b};var id=function(a){var b=a.v();a.Ka={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var X=function(a,b,c){a=a||"top";Jc(this,jd(a));this.yb=a;V.call(this,this.L,b||W.ga(),c);kd(this)};p(X,V);f=X.prototype;f.ac=!0;f.H=null;f.D=function(){X.e.D.call(this);kd(this)};f.removeChild=function(a,b){ld(this,a);return X.e.removeChild.call(this,a,b)};f.Wa=function(a){X.e.Wa.call(this,a);this.ac&&this.V(L(this,a))};f.V=function(a){a?Uc(a,!0):this.H&&Uc(this.H,!1)};
-var ld=function(a,b){if(b&&b==a.H){for(var c=jc(a,b),d=c-1;b=L(a,d);d--)if(b.s()&&b.isEnabled()){a.V(b);return}for(c+=1;b=L(a,c);c++)if(b.s()&&b.isEnabled()){a.V(b);return}a.V(null)}};f=X.prototype;f.Zb=function(a){this.H&&this.H!=a.target&&Uc(this.H,!1);this.H=a.target};f.$b=function(a){a.target==this.H&&(this.H=null)};f.Xb=function(a){ld(this,a.target)};f.Yb=function(a){ld(this,a.target)};f.ma=function(){L(this,this.h)||this.C(this.H||L(this,0))};
-var kd=function(a){dc(a).c(a,"select",a.Zb).c(a,"unselect",a.$b).c(a,"disable",a.Xb).c(a,"hide",a.Yb)},jd=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Fc("goog-tab-bar",function(){return new X});var Y=function(a,b,c,d,e){function g(a){a&&(a.tabIndex=0,nc(a,h.da()),P(a,"goog-zippy-header"),md(h,a),a&&h.Ob.c(a,"keydown",h.Pb))}I.call(this);this.A=e||fb();this.R=this.A.a(a)||null;this.Aa=this.A.a(d||null);this.ca=(this.Qa=n(b)?b:null)||!b?null:this.A.a(b);this.k=!0==c;this.Ob=new H(this);this.qb=new H(this);var h=this;g(this.R);g(this.Aa);this.S(this.k)};p(Y,I);f=Y.prototype;f.Z=!0;f.da=function(){return"tab"};f.B=function(){return this.ca};f.toggle=function(){this.S(!this.k)};
-f.S=function(a){this.ca?J(this.ca,a):a&&this.Qa&&(this.ca=this.Qa());this.ca&&P(this.ca,"goog-zippy-content");if(this.Aa)J(this.R,!a),J(this.Aa,a);else if(this.R){var b=this.R;a?P(b,"goog-zippy-expanded"):Cc(b,"goog-zippy-expanded");b=this.R;a?Cc(b,"goog-zippy-collapsed"):P(b,"goog-zippy-collapsed");pc(this.R,"expanded",a)}this.k=a;this.dispatchEvent(new nd("toggle",this))};f.pb=function(){return this.Z};f.Pa=function(a){this.Z!=a&&((this.Z=a)?(md(this,this.R),md(this,this.Aa)):this.qb.ab())};
-var md=function(a,b){b&&a.qb.c(b,"click",a.bc)};Y.prototype.Pb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new C("action",this)),a.preventDefault(),a.stopPropagation()};Y.prototype.bc=function(){this.toggle();this.dispatchEvent(new C("action",this))};var nd=function(a,b){C.call(this,a,b)};p(nd,C);var Z=function(a,b){this.ob=[];for(var c=hb("span","ae-zippy",gb(document,a)),d=0,e;e=c[d];d++){var g=e.parentNode.parentNode.parentNode;if(void 0!=g.nextElementSibling)g=g.nextElementSibling;else for(g=g.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new Y(e,g,!1);this.ob.push(e)}this.hc=new od(this.ob,gb(document,b))};Z.prototype.kc=function(){return this.hc};Z.prototype.lc=function(){return this.ob};
-var od=function(a,b){this.va=a;if(this.va.length)for(var c=0,d;d=this.va[c];c++)F(d,"toggle",this.Wb,!1,this);this.La=0;this.k=!1;c="ae-toggle ae-plus ae-action";this.va.length||(c+=" ae-disabled");this.P=lb("span",{className:c},"Expand All");F(this.P,"click",this.Vb,!1,this);b&&b.appendChild(this.P)};od.prototype.Vb=function(){this.va.length&&this.S(!this.k)};
-od.prototype.Wb=function(a){a=a.currentTarget;this.La=a.k?this.La+1:this.La-1;a.k!=this.k&&(a.k?(this.k=!0,pd(this,!0)):0==this.La&&(this.k=!1,pd(this,!1)))};od.prototype.S=function(a){this.k=a;a=0;for(var b;b=this.va[a];a++)b.k!=this.k&&b.S(this.k);pd(this)};
-var pd=function(a,b){(void 0!==b?b:a.k)?(Ma(a.P,"ae-plus"),Ka(a.P,"ae-minus"),ob(a.P,"Collapse All")):(Ma(a.P,"ae-minus"),Ka(a.P,"ae-plus"),ob(a.P,"Expand All"))},qd=function(a){this.cc=a;this.Db={};var b,c=lb("div",{},b=lb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),lb("div",{className:"goog-tab-bar-clear"}),a=lb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new X;d.K(b);F(d,"select",this.Cb,!1,this);F(d,"unselect",this.Cb,!1,this);
-b=0;for(var e;e=this.cc[b];b++)if(e=gb(document,"ae-stats-details-"+e)){var g=hb("h2",null,e)[0],h;h=g;var k=void 0;cb&&"innerText"in h?k=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(k=[],tb(h,k,!0),k=k.join(""));k=k.replace(/ \xAD /g," ").replace(/\xAD/g,"");k=k.replace(/\u200B/g,"");cb||(k=k.replace(/ +/g," "));" "!=k&&(k=k.replace(/^\s*/,""));h=k;g&&g.parentNode&&g.parentNode.removeChild(g);g=new hd(h);this.Db[ka(g)]=e;d.Ca(g,!0);a.appendChild(e);0==b?d.V(g):J(e,!1)}gb(document,"bd").appendChild(c)};
-qd.prototype.Cb=function(a){var b=this.Db[ka(a.target)];J(b,"select"==a.type)};ma("ae.Stats.Details.Tabs",qd);ma("goog.ui.Zippy",Y);Y.prototype.setExpanded=Y.prototype.S;ma("ae.Stats.MakeZippys",Z);Z.prototype.getExpandCollapse=Z.prototype.kc;Z.prototype.getZippys=Z.prototype.lc;od.prototype.setExpanded=od.prototype.S;var $=function(){this.cb=[];this.ib=[]},rd=[[5,0.2,1],[6,0.2,1.2],[5,0.25,1.25],[6,0.25,1.5],[4,0.5,2],[5,0.5,2.5],[6,0.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],sd=function(a){if(0>=a)return[2,0.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c<rd.length;c++)if(a<=rd[c][2])return[rd[c][0],rd[c][1]*b,rd[c][2]*b];return[5,2*b,10*b]};$.prototype.hb="stats/static/pix.gif";$.prototype.w="ae-stats-gantt-";$.prototype.fb=0;$.prototype.write=function(a){this.ib.push(a)};
-var td=function(a,b,c,d){a.write('<tr class="'+a.w+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.w+'axis">');for(var e=0;e<=b;e++)a.write('<img class="'+a.w+'tick" src="'+a.hb+'" alt="" '),a.write('style="left:'+e*c*d+'%"\n>'),a.write('<span class="'+a.w+'scale" style="left:'+e*c*d+'%">'),a.write("&nbsp;"+e*c+"</span>");a.write("</div></td></tr>\n")};
-$.prototype.jc=function(){this.ib=[];var a=sd(this.fb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.w+'table">\n');td(this,b,c,a);for(var d=0;d<this.cb.length;d++){var e=this.cb[d];this.write('<tr class="'+this.w+'datarow"><td width="20%">');0<e.label.length&&(0<e.ia.length&&this.write('<a class="'+this.w+'link" href="'+e.ia+'">'),this.write(e.label),0<e.ia.length&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.w+'container">');0<e.ia.length&&this.write('<a class="'+
+f.qa=function(a){var b=this.getParent();b&&"function"==typeof b.isEnabled&&!b.isEnabled()||!T(this,1,!a)||(a||(this.setActive(!1),this.C(!1)),this.s()&&this.b.na(this,a),this.t(1,!a))};f.C=function(a){T(this,2,a)&&this.t(2,a)};f.setActive=function(a){T(this,4,a)&&this.t(4,a)};var Vc=function(a,b){T(a,8,b)&&a.t(8,b)},Wc=function(a,b){T(a,64,b)&&a.t(64,b)};S.prototype.t=function(a,b){this.l&a&&b!=!!(this.g&a)&&(this.b.t(this,a,b),this.g=b?this.g|a:this.g&~a)};
+var Xc=function(a,b,c){if(a.f&&a.g&b&&!c)throw Error("Component already rendered");!c&&a.g&b&&a.t(b,!1);a.l=c?a.l|b:a.l&~b},U=function(a,b){return!!(a.ec&b)&&!!(a.l&b)},T=function(a,b,c){return!!(a.l&b)&&!!(a.g&b)!=c&&(!(a.T&b)||a.dispatchEvent(bc(b,c)))&&!a.Ub};f=S.prototype;f.Sa=function(a){(!a.relatedTarget||!ob(this.a(),a.relatedTarget))&&this.dispatchEvent("enter")&&this.isEnabled()&&U(this,2)&&this.C(!0)};
+f.Ra=function(a){a.relatedTarget&&ob(this.a(),a.relatedTarget)||!this.dispatchEvent("leave")||(U(this,4)&&this.setActive(!1),U(this,2)&&this.C(!1))};f.oa=aa;f.ka=function(a){this.isEnabled()&&(U(this,2)&&this.C(!0),!Bb(a)||z&&A&&a.ctrlKey||(U(this,4)&&this.setActive(!0),this.b.I(this)&&this.j().focus()));this.xa||!Bb(a)||z&&A&&a.ctrlKey||a.preventDefault()};f.Ta=function(a){this.isEnabled()&&(U(this,2)&&this.C(!0),this.g&4&&Yc(this,a)&&U(this,4)&&this.setActive(!1))};
+f.tb=function(a){this.isEnabled()&&Yc(this,a)};var Yc=function(a,b){if(U(a,16)){var c=!(a.g&16);T(a,16,c)&&a.t(16,c)}U(a,8)&&Vc(a,!0);U(a,64)&&Wc(a,!(a.g&64));c=new C("action",a);b&&(c.altKey=b.altKey,c.ctrlKey=b.ctrlKey,c.metaKey=b.metaKey,c.shiftKey=b.shiftKey,c.jb=b.jb);return a.dispatchEvent(c)};S.prototype.ma=function(){U(this,32)&&T(this,32,!0)&&this.t(32,!0)};S.prototype.la=function(){U(this,4)&&this.setActive(!1);U(this,32)&&T(this,32,!1)&&this.t(32,!1)};
+S.prototype.J=function(a){return this.s()&&this.isEnabled()&&this.mb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};S.prototype.mb=function(a){return 13==a.keyCode&&Yc(this,a)};if(!n(S))throw Error("Invalid component class "+S);if(!n(R))throw Error("Invalid renderer class "+R);var Zc=ka(S);Hc[Zc]=R;Gc("goog-control",function(){return new S(null)});var V=function(a,b,c){K.call(this,c);this.b=b||Q.ga();this.L=a||"vertical"};p(V,K);f=V.prototype;f.vb=null;f.ea=null;f.b=null;f.L=null;f.n=!0;f.U=!0;f.$a=!0;f.h=-1;f.i=null;f.aa=!1;f.Rb=!1;f.Qb=!0;f.M=null;f.j=function(){return this.vb||this.b.j(this)};f.ya=function(){return this.ea||(this.ea=new M(this.j()))};f.Ab=function(){return this.b};f.o=function(){this.d=this.b.o(this)};f.B=function(){return this.b.B(this.a())};f.X=function(a){return this.b.X(a)};
+f.Za=function(a){this.d=this.b.K(this,a);"none"==a.style.display&&(this.n=!1)};f.D=function(){V.e.D.call(this);hc(this,function(a){a.f&&$c(this,a)},this);var a=this.a();this.b.Oa(this);this.ja(this.n,!0);ec(this).c(this,"enter",this.Kb).c(this,"highlight",this.Lb).c(this,"unhighlight",this.Nb).c(this,"open",this.Mb).c(this,"close",this.Ib).c(a,"mousedown",this.ka).c(fb(a),"mouseup",this.Jb).c(a,["mousedown","mouseup","mouseover","mouseout","contextmenu"],this.Hb);this.I()&&ad(this,!0)};
+var ad=function(a,b){var c=ec(a),d=a.j();b?c.c(d,"focus",a.ma).c(d,"blur",a.la).c(a.ya(),"key",a.J):c.u(d,"focus",a.ma).u(d,"blur",a.la).u(a.ya(),"key",a.J)};f=V.prototype;f.ba=function(){bd(this,-1);this.i&&Wc(this.i,!1);this.aa=!1;V.e.ba.call(this)};f.Kb=function(){return!0};
+f.Lb=function(a){var b=kc(this,a.target);if(-1<b&&b!=this.h){var c=L(this,this.h);c&&c.C(!1);this.h=b;c=L(this,this.h);this.aa&&c.setActive(!0);this.Qb&&this.i&&c!=this.i&&(c.l&64?Wc(c,!0):Wc(this.i,!1))}b=this.a();r(b,"The DOM element for the container cannot be null.");null!=a.target.a()&&qc(b,"activedescendant",a.target.a().id)};f.Nb=function(a){a.target==L(this,this.h)&&(this.h=-1);a=this.a();r(a,"The DOM element for the container cannot be null.");a.removeAttribute(pc("activedescendant"))};
+f.Mb=function(a){(a=a.target)&&a!=this.i&&a.getParent()==this&&(this.i&&Wc(this.i,!1),this.i=a)};f.Ib=function(a){a.target==this.i&&(this.i=null)};f.ka=function(a){this.U&&(this.aa=!0);var b=this.j();b&&sb(b)&&tb(b)?b.focus():a.preventDefault()};f.Jb=function(){this.aa=!1};
+f.Hb=function(a){var b;t:{b=a.target;if(this.M)for(var c=this.a();b&&b!==c;){var d=b.id;if(d in this.M){b=this.M[d];break t}b=b.parentNode}b=null}if(b)switch(a.type){case "mousedown":b.ka(a);break;case "mouseup":b.Ta(a);break;case "mouseover":b.Sa(a);break;case "mouseout":b.Ra(a);break;case "contextmenu":b.oa(a)}};f.ma=function(){};f.la=function(){bd(this,-1);this.aa=!1;this.i&&Wc(this.i,!1)};
+f.J=function(a){return this.isEnabled()&&this.s()&&(0!=ic(this)||this.vb)&&this.mb(a)?(a.preventDefault(),a.stopPropagation(),!0):!1};
+f.mb=function(a){var b=L(this,this.h);if(b&&"function"==typeof b.J&&b.J(a)||this.i&&this.i!=b&&"function"==typeof this.i.J&&this.i.J(a))return!0;if(a.shiftKey||a.ctrlKey||a.metaKey||a.altKey)return!1;switch(a.keyCode){case 27:if(this.I())this.j().blur();else return!1;break;case 36:cd(this);break;case 35:dd(this);break;case 38:if("vertical"==this.L)ed(this);else return!1;break;case 37:if("horizontal"==this.L)jc(this)?fd(this):ed(this);else return!1;break;case 40:if("vertical"==this.L)fd(this);else return!1;
+break;case 39:if("horizontal"==this.L)jc(this)?ed(this):fd(this);else return!1;break;default:return!1}return!0};var $c=function(a,b){var c=b.a(),c=c.id||(c.id=cc(b));a.M||(a.M={});a.M[c]=b};V.prototype.Ca=function(a,b){Ca(a,S,"The child of a container must be a control");V.e.Ca.call(this,a,b)};V.prototype.Va=function(a,b,c){a.T|=2;a.T|=64;!this.I()&&this.Rb||Xc(a,32,!1);a.Pa(!1);V.e.Va.call(this,a,b,c);a.f&&this.f&&$c(this,a);b<=this.h&&this.h++};
+V.prototype.removeChild=function(a,b){if(a=m(a)?fc(this,a):a){var c=kc(this,a);-1!=c&&(c==this.h?(a.C(!1),this.h=-1):c<this.h&&this.h--);var d=a.a();d&&d.id&&this.M&&(c=this.M,d=d.id,d in c&&delete c[d])}a=V.e.removeChild.call(this,a,b);a.Pa(!0);return a};var Kc=function(a,b){if(a.a())throw Error("Component already rendered");a.L=b};f=V.prototype;f.s=function(){return this.n};
+f.ja=function(a,b){if(b||this.n!=a&&this.dispatchEvent(a?"show":"hide")){this.n=a;var c=this.a();c&&(J(c,a),this.I()&&Ic(this.j(),this.U&&this.n),b||this.dispatchEvent(this.n?"aftershow":"afterhide"));return!0}return!1};f.isEnabled=function(){return this.U};f.qa=function(a){this.U!=a&&this.dispatchEvent(a?"enable":"disable")&&(a?(this.U=!0,hc(this,function(a){a.xb?delete a.xb:a.qa(!0)})):(hc(this,function(a){a.isEnabled()?a.qa(!1):a.xb=!0}),this.aa=this.U=!1),this.I()&&Ic(this.j(),a&&this.n))};
+f.I=function(){return this.$a};f.na=function(a){a!=this.$a&&this.f&&ad(this,a);this.$a=a;this.U&&this.n&&Ic(this.j(),a)};var bd=function(a,b){var c=L(a,b);c?c.C(!0):-1<a.h&&L(a,a.h).C(!1)};V.prototype.C=function(a){bd(this,kc(this,a))};
+var cd=function(a){gd(a,function(a,c){return(a+1)%c},ic(a)-1)},dd=function(a){gd(a,function(a,c){a--;return 0>a?c-1:a},0)},fd=function(a){gd(a,function(a,c){return(a+1)%c},a.h)},ed=function(a){gd(a,function(a,c){a--;return 0>a?c-1:a},a.h)},gd=function(a,b,c){c=0>c?kc(a,a.i):c;var d=ic(a);c=b.call(a,c,d);for(var e=0;e<=d;){var g=L(a,c);if(g&&g.s()&&g.isEnabled()&&g.l&2){a.Wa(c);break}e++;c=b.call(a,c,d)}};V.prototype.Wa=function(a){bd(this,a)};var hd=function(){};p(hd,R);ba(hd);f=hd.prototype;f.v=function(){return"goog-tab"};f.da=function(){return"tab"};f.o=function(a){var b=hd.e.o.call(this,a);(a=a.Ua())&&this.Xa(b,a);return b};f.K=function(a,b){b=hd.e.K.call(this,a,b);var c=this.Ua(b);c&&(a.sb=c);a.g&8&&(c=a.getParent())&&n(c.V)&&(a.t(8,!1),c.V(a));return b};f.Ua=function(a){return a.title||""};f.Xa=function(a,b){a&&(a.title=b||"")};var id=function(a,b,c){S.call(this,a,b||hd.ga(),c);Xc(this,8,!0);this.T|=9};p(id,S);id.prototype.Ua=function(){return this.sb};id.prototype.Xa=function(a){this.Ab().Xa(this.a(),a);this.sb=a};Gc("goog-tab",function(){return new id(null)});var W=function(){this.Gb="tablist"};p(W,Q);ba(W);W.prototype.v=function(){return"goog-tab-bar"};W.prototype.bb=function(a,b,c){this.Bb||(this.Ka||jd(this),this.Bb=Ra(this.Ka));var d=this.Bb[b];d?(Kc(a,kd(d)),a.yb=d):W.e.bb.call(this,a,b,c)};W.prototype.ta=function(a){var b=W.e.ta.call(this,a);this.Ka||jd(this);b.push(this.Ka[a.yb]);return b};var jd=function(a){var b=a.v();a.Ka={top:b+"-top",bottom:b+"-bottom",start:b+"-start",end:b+"-end"}};var X=function(a,b,c){a=a||"top";Kc(this,kd(a));this.yb=a;V.call(this,this.L,b||W.ga(),c);ld(this)};p(X,V);f=X.prototype;f.ac=!0;f.H=null;f.D=function(){X.e.D.call(this);ld(this)};f.removeChild=function(a,b){md(this,a);return X.e.removeChild.call(this,a,b)};f.Wa=function(a){X.e.Wa.call(this,a);this.ac&&this.V(L(this,a))};f.V=function(a){a?Vc(a,!0):this.H&&Vc(this.H,!1)};
+var md=function(a,b){if(b&&b==a.H){for(var c=kc(a,b),d=c-1;b=L(a,d);d--)if(b.s()&&b.isEnabled()){a.V(b);return}for(c+=1;b=L(a,c);c++)if(b.s()&&b.isEnabled()){a.V(b);return}a.V(null)}};f=X.prototype;f.Zb=function(a){this.H&&this.H!=a.target&&Vc(this.H,!1);this.H=a.target};f.$b=function(a){a.target==this.H&&(this.H=null)};f.Xb=function(a){md(this,a.target)};f.Yb=function(a){md(this,a.target)};f.ma=function(){L(this,this.h)||this.C(this.H||L(this,0))};
+var ld=function(a){ec(a).c(a,"select",a.Zb).c(a,"unselect",a.$b).c(a,"disable",a.Xb).c(a,"hide",a.Yb)},kd=function(a){return"start"==a||"end"==a?"vertical":"horizontal"};Gc("goog-tab-bar",function(){return new X});var Y=function(a,b,c,d,e){function g(a){a&&(a.tabIndex=0,oc(a,h.da()),P(a,"goog-zippy-header"),nd(h,a),a&&h.Ob.c(a,"keydown",h.Pb))}I.call(this);this.A=e||gb();this.R=this.A.a(a)||null;this.Aa=this.A.a(d||null);this.ca=(this.Qa=n(b)?b:null)||!b?null:this.A.a(b);this.k=!0==c;this.Ob=new H(this);this.qb=new H(this);var h=this;g(this.R);g(this.Aa);this.S(this.k)};p(Y,I);f=Y.prototype;f.Z=!0;f.da=function(){return"tab"};f.B=function(){return this.ca};f.toggle=function(){this.S(!this.k)};
+f.S=function(a){this.ca?J(this.ca,a):a&&this.Qa&&(this.ca=this.Qa());this.ca&&P(this.ca,"goog-zippy-content");if(this.Aa)J(this.R,!a),J(this.Aa,a);else if(this.R){var b=this.R;a?P(b,"goog-zippy-expanded"):Dc(b,"goog-zippy-expanded");b=this.R;a?Dc(b,"goog-zippy-collapsed"):P(b,"goog-zippy-collapsed");qc(this.R,"expanded",a)}this.k=a;this.dispatchEvent(new od("toggle",this))};f.pb=function(){return this.Z};f.Pa=function(a){this.Z!=a&&((this.Z=a)?(nd(this,this.R),nd(this,this.Aa)):this.qb.ab())};
+var nd=function(a,b){b&&a.qb.c(b,"click",a.bc)};Y.prototype.Pb=function(a){if(13==a.keyCode||32==a.keyCode)this.toggle(),this.dispatchEvent(new C("action",this)),a.preventDefault(),a.stopPropagation()};Y.prototype.bc=function(){this.toggle();this.dispatchEvent(new C("action",this))};var od=function(a,b){C.call(this,a,b)};p(od,C);var Z=function(a,b){this.ob=[];for(var c=ib("span","ae-zippy",hb(document,a)),d=0,e;e=c[d];d++){var g=e.parentNode.parentNode.parentNode;if(void 0!=g.nextElementSibling)g=g.nextElementSibling;else for(g=g.nextSibling;g&&1!=g.nodeType;)g=g.nextSibling;e=new Y(e,g,!1);this.ob.push(e)}this.hc=new pd(this.ob,hb(document,b))};Z.prototype.kc=function(){return this.hc};Z.prototype.lc=function(){return this.ob};
+var pd=function(a,b){this.va=a;if(this.va.length)for(var c=0,d;d=this.va[c];c++)F(d,"toggle",this.Wb,!1,this);this.La=0;this.k=!1;c="ae-toggle ae-plus ae-action";this.va.length||(c+=" ae-disabled");this.P=mb("span",{className:c},"Expand All");F(this.P,"click",this.Vb,!1,this);b&&b.appendChild(this.P)};pd.prototype.Vb=function(){this.va.length&&this.S(!this.k)};
+pd.prototype.Wb=function(a){a=a.currentTarget;this.La=a.k?this.La+1:this.La-1;a.k!=this.k&&(a.k?(this.k=!0,qd(this,!0)):0==this.La&&(this.k=!1,qd(this,!1)))};pd.prototype.S=function(a){this.k=a;a=0;for(var b;b=this.va[a];a++)b.k!=this.k&&b.S(this.k);qd(this)};
+var qd=function(a,b){(void 0!==b?b:a.k)?(Na(a.P,"ae-plus"),La(a.P,"ae-minus"),pb(a.P,"Collapse All")):(Na(a.P,"ae-minus"),La(a.P,"ae-plus"),pb(a.P,"Expand All"))},rd=function(a){this.cc=a;this.Db={};var b,c=mb("div",{},b=mb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),mb("div",{className:"goog-tab-bar-clear"}),a=mb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new X;d.K(b);F(d,"select",this.Cb,!1,this);F(d,"unselect",this.Cb,!1,this);
+b=0;for(var e;e=this.cc[b];b++)if(e=hb(document,"ae-stats-details-"+e)){var g=ib("h2",null,e)[0],h;h=g;var k=void 0;db&&"innerText"in h?k=h.innerText.replace(/(\r\n|\r|\n)/g,"\n"):(k=[],ub(h,k,!0),k=k.join(""));k=k.replace(/ \xAD /g," ").replace(/\xAD/g,"");k=k.replace(/\u200B/g,"");db||(k=k.replace(/ +/g," "));" "!=k&&(k=k.replace(/^\s*/,""));h=k;g&&g.parentNode&&g.parentNode.removeChild(g);g=new id(h);this.Db[ka(g)]=e;d.Ca(g,!0);a.appendChild(e);0==b?d.V(g):J(e,!1)}hb(document,"bd").appendChild(c)};
+rd.prototype.Cb=function(a){var b=this.Db[ka(a.target)];J(b,"select"==a.type)};ma("ae.Stats.Details.Tabs",rd);ma("goog.ui.Zippy",Y);Y.prototype.setExpanded=Y.prototype.S;ma("ae.Stats.MakeZippys",Z);Z.prototype.getExpandCollapse=Z.prototype.kc;Z.prototype.getZippys=Z.prototype.lc;pd.prototype.setExpanded=pd.prototype.S;var $=function(){this.cb=[];this.ib=[]},sd=[[5,.2,1],[6,.2,1.2],[5,.25,1.25],[6,.25,1.5],[4,.5,2],[5,.5,2.5],[6,.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],td=function(a){if(0>=a)return[2,.5,1];for(var b=1;1>a;)a*=10,b/=10;for(;10<=a;)a/=10,b*=10;for(var c=0;c<sd.length;c++)if(a<=sd[c][2])return[sd[c][0],sd[c][1]*b,sd[c][2]*b];return[5,2*b,10*b]};$.prototype.hb="stats/static/pix.gif";$.prototype.w="ae-stats-gantt-";$.prototype.fb=0;$.prototype.write=function(a){this.ib.push(a)};
+var ud=function(a,b,c,d){a.write('<tr class="'+a.w+'axisrow"><td width="20%"></td><td>');a.write('<div class="'+a.w+'axis">');for(var e=0;e<=b;e++)a.write('<img class="'+a.w+'tick" src="'+a.hb+'" alt="" '),a.write('style="left:'+e*c*d+'%"\n>'),a.write('<span class="'+a.w+'scale" style="left:'+e*c*d+'%">'),a.write("&nbsp;"+e*c+"</span>");a.write("</div></td></tr>\n")};
+$.prototype.jc=function(){this.ib=[];var a=td(this.fb),b=a[0],c=a[1],a=100/a[2];this.write('<table class="'+this.w+'table">\n');ud(this,b,c,a);for(var d=0;d<this.cb.length;d++){var e=this.cb[d];this.write('<tr class="'+this.w+'datarow"><td width="20%">');0<e.label.length&&(0<e.ia.length&&this.write('<a class="'+this.w+'link" href="'+e.ia+'">'),this.write(e.label),0<e.ia.length&&this.write("</a>"));this.write("</td>\n<td>");this.write('<div class="'+this.w+'container">');0<e.ia.length&&this.write('<a class="'+
 this.w+'link" href="'+e.ia+'"\n>');this.write('<img class="'+this.w+'bar" src="'+this.hb+'" alt="" ');this.write('style="left:'+e.start*a+"%;width:"+e.duration*a+'%;min-width:1px"\n>');0<e.eb&&(this.write('<img class="'+this.w+'extra" src="'+this.hb+'" alt="" '),this.write('style="left:'+e.start*a+"%;width:"+e.eb*a+'%"\n>'));0<e.zb.length&&(this.write('<span class="'+this.w+'inline" style="left:'+(e.start+Math.max(e.duration,e.eb))*a+'%">&nbsp;'),this.write(e.zb),this.write("</span>"));0<e.ia.length&&
-this.write("</a>");this.write("</div></td></tr>\n")}td(this,b,c,a);this.write("</table>\n");return this.ib.join("")};$.prototype.ic=function(a,b,c,d,e,g){this.fb=Math.max(this.fb,Math.max(b+c,b+d));this.cb.push({label:a,start:b,duration:c,eb:d,zb:e,ia:g})};ma("Gantt",$);$.prototype.add_bar=$.prototype.ic;$.prototype.draw=$.prototype.jc;})();
+this.write("</a>");this.write("</div></td></tr>\n")}ud(this,b,c,a);this.write("</table>\n");return this.ib.join("")};$.prototype.ic=function(a,b,c,d,e,g){this.fb=Math.max(this.fb,Math.max(b+c,b+d));this.cb.push({label:a,start:b,duration:c,eb:d,zb:e,ia:g})};ma("Gantt",$);$.prototype.add_bar=$.prototype.ic;$.prototype.draw=$.prototype.jc;})();
diff --git a/google/appengine/ext/datastore_admin/backup_pb2.py b/google/appengine/ext/datastore_admin/backup_pb2.py
index 25002eb..5ee7f1d 100644
--- a/google/appengine/ext/datastore_admin/backup_pb2.py
+++ b/google/appengine/ext/datastore_admin/backup_pb2.py
@@ -22,9 +22,12 @@
 from google.net.proto2.python.public import descriptor as _descriptor
 from google.net.proto2.python.public import message as _message
 from google.net.proto2.python.public import reflection as _reflection
+from google.net.proto2.python.public import symbol_database as _symbol_database
 from google.net.proto2.proto import descriptor_pb2
 
 
+_sym_db = _symbol_database.Default()
+
 
 
 
@@ -33,6 +36,7 @@
   package='apphosting.ext.datastore_admin',
   serialized_pb=_b('\n+apphosting/ext/datastore_admin/backup.proto\x12\x1e\x61pphosting.ext.datastore_admin\"\x8c\x01\n\x06\x42\x61\x63kup\x12?\n\x0b\x62\x61\x63kup_info\x18\x01 \x01(\x0b\x32*.apphosting.ext.datastore_admin.BackupInfo\x12\x41\n\tkind_info\x18\x02 \x03(\x0b\x32..apphosting.ext.datastore_admin.KindBackupInfo\"Q\n\nBackupInfo\x12\x13\n\x0b\x62\x61\x63kup_name\x18\x01 \x01(\t\x12\x17\n\x0fstart_timestamp\x18\x02 \x01(\x03\x12\x15\n\rend_timestamp\x18\x03 \x01(\x03\"\x8c\x01\n\x0eKindBackupInfo\x12\x0c\n\x04kind\x18\x01 \x02(\t\x12\x0c\n\x04\x66ile\x18\x02 \x03(\t\x12\x43\n\rentity_schema\x18\x03 \x01(\x0b\x32,.apphosting.ext.datastore_admin.EntitySchema\x12\x19\n\nis_partial\x18\x04 \x01(\x08:\x05\x66\x61lse\"\x90\x05\n\x0c\x45ntitySchema\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x41\n\x05\x66ield\x18\x02 \x03(\x0b\x32\x32.apphosting.ext.datastore_admin.EntitySchema.Field\x1a\xb2\x01\n\x04Type\x12\x0f\n\x07is_list\x18\x01 \x01(\x08\x12R\n\x0eprimitive_type\x18\x02 \x03(\x0e\x32:.apphosting.ext.datastore_admin.EntitySchema.PrimitiveType\x12\x45\n\x0f\x65mbedded_schema\x18\x03 \x03(\x0b\x32,.apphosting.ext.datastore_admin.EntitySchema\x1aj\n\x05\x46ield\x12\x0c\n\x04name\x18\x01 \x02(\t\x12?\n\x04type\x18\x02 \x03(\x0b\x32\x31.apphosting.ext.datastore_admin.EntitySchema.Type\x12\x12\n\nfield_name\x18\x03 \x01(\t\"\x8d\x02\n\rPrimitiveType\x12\t\n\x05\x46LOAT\x10\x00\x12\x0b\n\x07INTEGER\x10\x01\x12\x0b\n\x07\x42OOLEAN\x10\x02\x12\n\n\x06STRING\x10\x03\x12\r\n\tDATE_TIME\x10\x04\x12\n\n\x06RATING\x10\x05\x12\x08\n\x04LINK\x10\x06\x12\x0c\n\x08\x43\x41TEGORY\x10\x07\x12\x10\n\x0cPHONE_NUMBER\x10\x08\x12\x12\n\x0ePOSTAL_ADDRESS\x10\t\x12\t\n\x05\x45MAIL\x10\n\x12\r\n\tIM_HANDLE\x10\x0b\x12\x0c\n\x08\x42LOB_KEY\x10\x0c\x12\x08\n\x04TEXT\x10\r\x12\x08\n\x04\x42LOB\x10\x0e\x12\x0e\n\nSHORT_BLOB\x10\x0f\x12\x08\n\x04USER\x10\x10\x12\r\n\tGEO_POINT\x10\x11\x12\r\n\tREFERENCE\x10\x12\x42\x14\x10\x02 \x02(\x02\x42\x0c\x42\x61\x63kupProtos')
 )
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
 
 
@@ -124,6 +128,7 @@
   serialized_start=836,
   serialized_end=1105,
 )
+_sym_db.RegisterEnumDescriptor(_ENTITYSCHEMA_PRIMITIVETYPE)
 
 
 _BACKUP = _descriptor.Descriptor(
@@ -389,18 +394,21 @@
   __module__ = 'google.appengine.ext.datastore_admin.backup_pb2'
 
   ))
+_sym_db.RegisterMessage(Backup)
 
 BackupInfo = _reflection.GeneratedProtocolMessageType('BackupInfo', (_message.Message,), dict(
   DESCRIPTOR = _BACKUPINFO,
   __module__ = 'google.appengine.ext.datastore_admin.backup_pb2'
 
   ))
+_sym_db.RegisterMessage(BackupInfo)
 
 KindBackupInfo = _reflection.GeneratedProtocolMessageType('KindBackupInfo', (_message.Message,), dict(
   DESCRIPTOR = _KINDBACKUPINFO,
   __module__ = 'google.appengine.ext.datastore_admin.backup_pb2'
 
   ))
+_sym_db.RegisterMessage(KindBackupInfo)
 
 EntitySchema = _reflection.GeneratedProtocolMessageType('EntitySchema', (_message.Message,), dict(
 
@@ -421,6 +429,9 @@
   __module__ = 'google.appengine.ext.datastore_admin.backup_pb2'
 
   ))
+_sym_db.RegisterMessage(EntitySchema)
+_sym_db.RegisterMessage(EntitySchema.Type)
+_sym_db.RegisterMessage(EntitySchema.Field)
 
 
 DESCRIPTOR.has_options = True
diff --git a/google/appengine/ext/datastore_admin/static/js/compiled.js b/google/appengine/ext/datastore_admin/static/js/compiled.js
index 863aa30..b328a0f 100644
--- a/google/appengine/ext/datastore_admin/static/js/compiled.js
+++ b/google/appengine/ext/datastore_admin/static/js/compiled.js
@@ -1,18 +1,18 @@
 var g=document,k=Array,l=Error,aa=parseInt,n=String;function p(a,b){return a.currentTarget=b}function q(a,b){return a.keyCode=b}function r(a,b){return a.disabled=b}
 var s="push",t="shift",u="slice",v="replace",w="value",x="preventDefault",y="indexOf",z="keyCode",A="type",ba="name",B="toString",C="length",ca="propertyIsEnumerable",D="prototype",da="checked",E="split",F="style",ea="target",G="call",H="apply",I,J=this,K=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof k)return"array";if(a instanceof Object)return b;var c=Object[D][B][G](a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a[C]&&"undefined"!=typeof a.splice&&
 "undefined"!=typeof a[ca]&&!a[ca]("splice"))return"array";if("[object Function]"==c||"undefined"!=typeof a[G]&&"undefined"!=typeof a[ca]&&!a[ca]("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a[G])return"object";return b},fa=function(a){var b=K(a);return"array"==b||"object"==b&&"number"==typeof a[C]},L=function(a){return"string"==typeof a},ga=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},ha=function(a,b){var c=k[D][u][G](arguments,1);
-return function(){var b=c[u]();b[s][H](b,arguments);return a[H](this,b)}},ia=function(a,b){function c(){}c.prototype=b[D];a.t=b[D];a.prototype=new c;a.A=function(a,c,e){return b[D][c][H](a,k[D][u][G](arguments,2))}};var M=function(a){if(l.captureStackTrace)l.captureStackTrace(this,M);else{var b=l().stack;b&&(this.stack=b)}a&&(this.message=n(a))};ia(M,l);M[D].name="CustomError";var ja=function(a,b){for(var c=a[E]("%s"),d="",f=k[D][u][G](arguments,1);f[C]&&1<c[C];)d+=c[t]()+f[t]();return d+c.join("%s")},qa=function(a,b){if(b)return a[v](ka,"&amp;")[v](la,"&lt;")[v](ma,"&gt;")[v](na,"&quot;")[v](oa,"&#39;");if(!pa.test(a))return a;-1!=a[y]("&")&&(a=a[v](ka,"&amp;"));-1!=a[y]("<")&&(a=a[v](la,"&lt;"));-1!=a[y](">")&&(a=a[v](ma,"&gt;"));-1!=a[y]('"')&&(a=a[v](na,"&quot;"));-1!=a[y]("'")&&(a=a[v](oa,"&#39;"));return a},ka=/&/g,la=/</g,ma=/>/g,na=/"/g,oa=/'/g,pa=/[&<>"']/,ra=
-function(a,b){return a<b?-1:a>b?1:0},sa=function(a){return n(a)[v](/\-([a-z])/g,function(a,c){return c.toUpperCase()})},ta=function(a,b){var c=L(b)?n(b)[v](/([-()\[\]{}+?*.$\^|,:#<!\\])/g,"\\$1")[v](/\x08/g,"\\x08"):"\\s";return a[v](new RegExp("(^"+(c?"|["+c+"]+":"")+")([a-z])","g"),function(a,b,c){return b+c.toUpperCase()})};var ua=function(a,b){b.unshift(a);M[G](this,ja[H](null,b));b[t]()};ia(ua,M);ua[D].name="AssertionError";var N=function(a,b,c){if(!a){var d="Assertion failed";if(b)var d=d+(": "+b),f=k[D][u][G](arguments,2);throw new ua(""+d,f||[]);}return a};var O=k[D],va=O[y]?function(a,b,c){N(null!=a[C]);return O[y][G](a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a[C]+c):c;if(L(a))return L(b)&&1==b[C]?a[y](b,c):-1;for(;c<a[C];c++)if(c in a&&a[c]===b)return c;return-1},wa=O.forEach?function(a,b,c){N(null!=a[C]);O.forEach[G](a,b,c)}:function(a,b,c){for(var d=a[C],f=L(a)?a[E](""):a,e=0;e<d;e++)e in f&&b[G](c,f[e],e,a)},xa=function(a){var b=a[C];if(0<b){for(var c=k(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},ya=function(a,b,c){N(null!=a[C]);return 2>=
-arguments[C]?O[u][G](a,b):O[u][G](a,b,c)};var za=function(a,b,c){for(var d in a)b[G](c,a[d],d,a)},Aa="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ba=function(a,b){for(var c,d,f=1;f<arguments[C];f++){d=arguments[f];for(c in d)a[c]=d[c];for(var e=0;e<Aa[C];e++)c=Aa[e],Object[D].hasOwnProperty[G](d,c)&&(a[c]=d[c])}};var P;t:{var Ca=J.navigator;if(Ca){var Da=Ca.userAgent;if(Da){P=Da;break t}}P=""};var Ea=-1!=P[y]("Opera")||-1!=P[y]("OPR"),Q=-1!=P[y]("Trident")||-1!=P[y]("MSIE"),R=-1!=P[y]("Gecko")&&-1==P.toLowerCase()[y]("webkit")&&!(-1!=P[y]("Trident")||-1!=P[y]("MSIE")),S=-1!=P.toLowerCase()[y]("webkit"),Fa=function(){var a=J.document;return a?a.documentMode:void 0},Ga=function(){var a="",b;if(Ea&&J.opera)return a=J.opera.version,"function"==K(a)?a():a;R?b=/rv\:([^\);]+)(\)|;)/:Q?b=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:S&&(b=/WebKit\/(\S+)/);b&&(a=(a=b.exec(P))?a[1]:"");return Q&&(b=Fa(),b>
-parseFloat(a))?n(b):a}(),Ha={},T=function(a){var b;if(!(b=Ha[a])){b=0;for(var c=n(Ga)[v](/^[\s\xa0]+|[\s\xa0]+$/g,"")[E]("."),d=n(a)[v](/^[\s\xa0]+|[\s\xa0]+$/g,"")[E]("."),f=Math.max(c[C],d[C]),e=0;0==b&&e<f;e++){var h=c[e]||"",m=d[e]||"",db=RegExp("(\\d*)(\\D*)","g"),eb=RegExp("(\\d*)(\\D*)","g");do{var V=db.exec(h)||["","",""],W=eb.exec(m)||["","",""];if(0==V[0][C]&&0==W[0][C])break;b=ra(0==V[1][C]?0:aa(V[1],10),0==W[1][C]?0:aa(W[1],10))||ra(0==V[2][C],0==W[2][C])||ra(V[2],W[2])}while(0==b)}b=
-Ha[a]=0<=b}return b},Ia=J.document,Ja=Ia&&Q?Fa()||("CSS1Compat"==Ia.compatMode?aa(Ga,10):5):void 0;var Ka=!Q||Q&&9<=Ja;!R&&!Q||Q&&Q&&9<=Ja||R&&T("1.9.1");Q&&T("9");var La=function(a,b){var c;c=a.className;c=L(c)&&c.match(/\S+/g)||[];for(var d=ya(arguments,1),f=c[C]+d[C],e=c,h=0;h<d[C];h++)0<=va(e,d[h])||e[s](d[h]);a.className=c.join(" ");return c[C]==f};var U=function(a,b){return L(b)?a.getElementById(b):b},Ma=function(a,b,c,d){a=d||a;var f=b&&"*"!=b?b.toUpperCase():"";if(a.querySelectorAll&&a.querySelector&&(f||c))return a.querySelectorAll(f+(c?"."+c:""));if(c&&a.getElementsByClassName){b=a.getElementsByClassName(c);if(f){a={};for(var e=d=0,h;h=b[e];e++)f==h.nodeName&&(a[d++]=h);a.length=d;return a}return b}b=a.getElementsByTagName(f||"*");if(c){a={};for(e=d=0;h=b[e];e++){var f=h.className,m;if(m="function"==typeof f[E])m=0<=va(f[E](/\s+/),c);m&&
-(a[d++]=h)}a.length=d;return a}return b},Oa=function(a,b){za(b,function(b,d){"style"==d?a[F].cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in Na?a.setAttribute(Na[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},Na={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",usemap:"useMap",valign:"vAlign",width:"width"},Qa=function(a,
-b,c){var d=arguments,f=d[0],e=d[1];if(!Ka&&e&&(e[ba]||e[A])){f=["<",f];e[ba]&&f[s](' name="',qa(e[ba]),'"');if(e[A]){f[s](' type="',qa(e[A]),'"');var h={};Ba(h,e);delete h[A];e=h}f[s](">");f=f.join("")}f=g.createElement(f);e&&(L(e)?f.className=e:"array"==K(e)?La[H](null,[f].concat(e)):Oa(f,e));2<d[C]&&Pa(g,f,d,2);return f},Pa=function(a,b,c,d){function f(c){c&&b.appendChild(L(c)?a.createTextNode(c):c)}for(;d<c[C];d++){var e=c[d];if(!fa(e)||ga(e)&&0<e.nodeType)f(e);else{var h;t:{if(e&&"number"==typeof e[C]){if(ga(e)){h=
-"function"==typeof e.item||"string"==typeof e.item;break t}if("function"==K(e)){h="function"==typeof e.item;break t}}h=!1}wa(h?xa(e):e,f)}}};var Ra=function(a){var b=a[A];if(void 0===b)return null;switch(b.toLowerCase()){case "checkbox":case "radio":return a[da]?a[w]:null;case "select-one":return b=a.selectedIndex,0<=b?a.options[b][w]:null;case "select-multiple":for(var b=[],c,d=0;c=a.options[d];d++)c.selected&&b[s](c[w]);return b[C]?b:null;default:return void 0!==a[w]?a[w]:null}};var Sa=function(a){Sa[" "](a);return a};Sa[" "]=function(){};var Ta=!Q||Q&&9<=Ja,Ua=Q&&!T("9");!S||T("528");R&&T("1.9b")||Q&&T("8")||Ea&&T("9.5")||S&&T("528");R&&!T("8")||Q&&T("9");var Va=function(a,b){this.type=a;this.target=b;p(this,this[ea]);this.defaultPrevented=this.o=!1};Va[D].preventDefault=function(){this.defaultPrevented=!0};var X=function(a,b){Va[G](this,a?a[A]:"");this.target=null;p(this,null);this.relatedTarget=null;this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;q(this,0);this.charCode=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.p=this.state=null;a&&this.u(a,b)};ia(X,Va);
-X[D].u=function(a,b){var c=this.type=a[A];this.target=a[ea]||a.srcElement;p(this,b);var d=a.relatedTarget;if(d){if(R){var f;t:{try{Sa(d.nodeName);f=!0;break t}catch(e){}f=!1}f||(d=null)}}else"mouseover"==c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=S||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=S||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=a.screenX||
-0;this.screenY=a.screenY||0;this.button=a.button;q(this,a[z]||0);this.charCode=a.charCode||("keypress"==c?a[z]:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.state=a.state;this.p=a;a.defaultPrevented&&this[x]()};X[D].preventDefault=function(){X.t[x][G](this);var a=this.p;if(a[x])a[x]();else if(a.returnValue=!1,Ua)try{(a.ctrlKey||112<=a[z]&&123>=a[z])&&q(a,-1)}catch(b){}};var Wa="closure_listenable_"+(1E6*Math.random()|0),Xa=function(a){try{return!(!a||!a[Wa])}catch(b){return!1}},Ya=0;var Za=function(a,b,c,d,f,e){this.c=a;this.g=b;this.src=c;this.type=d;this.k=!!f;this.j=e;this.key=++Ya;this.e=this.l=!1};Za[D].n=function(){this.e=!0;this.j=this.src=this.g=this.c=null};var $a=function(a){this.src=a;this.a={};this.m=0};$a[D].add=function(a,b,c,d,f){var e=a[B]();a=this.a[e];a||(a=this.a[e]=[],this.m++);var h;t:{for(h=0;h<a[C];++h){var m=a[h];if(!m.e&&m.c==b&&m.k==!!d&&m.j==f)break t}h=-1}-1<h?(b=a[h],c||(b.l=!1)):(b=new Za(b,null,this.src,e,!!d,f),b.l=c,a[s](b));return b};$a[D].q=function(a){var b=a[A];if(!(b in this.a))return!1;var c=this.a[b],d=va(c,a),f;if(f=0<=d)N(null!=c[C]),O.splice[G](c,d,1);f&&(a.n(),0==this.a[b][C]&&(delete this.a[b],this.m--));return f};var ab="closure_lm_"+(1E6*Math.random()|0),bb={},cb=0,fb=function(a,b,c,d,f){if("array"==K(b)){for(var e=0;e<b[C];e++)fb(a,b[e],c,d,f);return null}c=gb(c);if(Xa(a))a=a.w(b,c,d,f);else{if(!b)throw l("Invalid event type");var e=!!d,h=hb(a);h||(a[ab]=h=new $a(a));c=h.add(b,c,!1,d,f);c.g||(d=ib(),c.g=d,d.src=a,d.c=c,a.addEventListener?a.addEventListener(b[B](),d,e):a.attachEvent(jb(b[B]()),d),cb++);a=c}return a},ib=function(){var a=kb,b=Ta?function(c){return a[G](b.src,b.c,c)}:function(c){c=a[G](b.src,
-b.c,c);if(!c)return c};return b},jb=function(a){return a in bb?bb[a]:bb[a]="on"+a},mb=function(a,b,c,d){var f=1;if(a=hb(a))if(b=a.a[b[B]()])for(b=xa(b),a=0;a<b[C];a++){var e=b[a];e&&e.k==c&&!e.e&&(f&=!1!==lb(e,d))}return Boolean(f)},lb=function(a,b){var c=a.c,d=a.j||a.src;if(a.l&&"number"!=typeof a&&a&&!a.e){var f=a.src;if(Xa(f))f.v(a);else{var e=a[A],h=a.g;f.removeEventListener?f.removeEventListener(e,h,a.k):f.detachEvent&&f.detachEvent(jb(e),h);cb--;(e=hb(f))?(e.q(a),0==e.m&&(e.src=null,f[ab]=null)):
-a.n()}}return c[G](d,b)},kb=function(a,b){if(a.e)return!0;if(!Ta){var c;if(!(c=b))t:{c=["window","event"];for(var d=J,f;f=c[t]();)if(null!=d[f])d=d[f];else{c=null;break t}c=d}f=c;c=new X(f,this);d=!0;if(!(0>f[z]||void 0!=f.returnValue)){t:{var e=!1;if(0==f[z])try{q(f,-1);break t}catch(h){e=!0}if(e||void 0==f.returnValue)f.returnValue=!0}f=[];for(e=c.currentTarget;e;e=e.parentNode)f[s](e);for(var e=a[A],m=f[C]-1;!c.o&&0<=m;m--)p(c,f[m]),d&=mb(f[m],e,!0,c);for(m=0;!c.o&&m<f[C];m++)p(c,f[m]),d&=mb(f[m],
-e,!1,c)}return d}return lb(a,new X(b,this))},hb=function(a){a=a[ab];return a instanceof $a?a:null},nb="__closure_events_fn_"+(1E9*Math.random()>>>0),gb=function(a){N(a,"Listener can not be null.");if("function"==K(a))return a;N(a.handleEvent,"An object listener must have handleEvent method.");return a[nb]||(a[nb]=function(b){return a.handleEvent(b)})};var ob=function(a,b,c){t:if(c=sa(c),void 0===a[F][c]){var d=(S?"Webkit":R?"Moz":Q?"ms":Ea?"O":null)+ta(c);if(void 0!==a[F][d]){c=d;break t}}c&&(a[F][c]=b)};var pb=function(a,b){var c=[];1<arguments[C]&&(c=k[D][u][G](arguments)[u](1));var d=Ma(g,"th","tct-selectall",a);if(0!=d[C]){var d=d[0],f=0,e=Ma(g,"tbody",null,a);e[C]&&(f=e[0].rows[C]);this.d=Qa("input",{type:"checkbox"});d.appendChild(this.d);f?fb(this.d,"click",this.s,!1,this):r(this.d,!0);this.f=[];this.h=[];this.i=[];d=Ma(g,"input",null,a);for(f=0;e=d[f];f++)"checkbox"==e[A]&&e!=this.d?(this.f[s](e),fb(e,"click",this.r,!1,this)):"action"==e[ba]&&(0<=c[y](e[w])?this.i[s](e):this.h[s](e),r(e,!0))}};
-I=pb[D];I.f=null;I.b=0;I.d=null;I.h=null;I.i=null;I.s=function(a){for(var b=a[ea][da],c=a=0,d;d=this.f[c];c++)d.checked=b,a+=1;this.b=b?this.f[C]:0;for(c=0;b=this.h[c];c++)r(b,!this.b);for(c=0;b=this.i[c];c++)r(b,1!=a?!0:!1)};I.r=function(a){this.b+=a[ea][da]?1:-1;this.d.checked=this.b==this.f[C];a=0;for(var b;b=this.h[a];a++)r(b,!this.b);for(a=0;b=this.i[a];a++)r(b,1!=this.b?!0:!1)};var qb=function(){var a=U(g,"kinds");a&&new pb(a);(a=U(g,"pending_backups"))&&new pb(a);(a=U(g,"backups"))&&new pb(a,"Restore");var b=U(g,"ae-datastore-admin-filesystem");b&&fb(b,"change",function(){var a="gs"==Ra(b);U(g,"gs_bucket_tr")[F].display=a?"":"none"});if(a=U(g,"confirm_delete_form")){var c=U(g,"confirm_readonly_delete");c&&(a.onsubmit=function(){var a=U(g,"confirm_message");L("color")?ob(a,"red","color"):za("color",ha(ob,a));return c[da]})}},Y=["ae","Datastore","Admin","init"],Z=J;
-Y[0]in Z||!Z.execScript||Z.execScript("var "+Y[0]);for(var $;Y[C]&&($=Y[t]());)Y[C]||void 0===qb?Z=Z[$]?Z[$]:Z[$]={}:Z[$]=qb;
+return function(){var b=c[u]();b[s][H](b,arguments);return a[H](this,b)}},ia=function(a,b){function c(){}c.prototype=b[D];a.t=b[D];a.prototype=new c;a.A=function(a,c,e){return b[D][c][H](a,k[D][u][G](arguments,2))}};var M=function(a){if(l.captureStackTrace)l.captureStackTrace(this,M);else{var b=l().stack;b&&(this.stack=b)}a&&(this.message=n(a))};ia(M,l);M[D].name="CustomError";var ja=function(a,b){for(var c=a[E]("%s"),d="",f=k[D][u][G](arguments,1);f[C]&&1<c[C];)d+=c[t]()+f[t]();return d+c.join("%s")},ra=function(a,b){if(b)a=a[v](ka,"&amp;")[v](la,"&lt;")[v](ma,"&gt;")[v](na,"&quot;")[v](oa,"&#39;")[v](pa,"&#0;");else{if(!qa.test(a))return a;-1!=a[y]("&")&&(a=a[v](ka,"&amp;"));-1!=a[y]("<")&&(a=a[v](la,"&lt;"));-1!=a[y](">")&&(a=a[v](ma,"&gt;"));-1!=a[y]('"')&&(a=a[v](na,"&quot;"));-1!=a[y]("'")&&(a=a[v](oa,"&#39;"));-1!=a[y]("\x00")&&(a=a[v](pa,"&#0;"))}return a},ka=/&/g,
+la=/</g,ma=/>/g,na=/"/g,oa=/'/g,pa=/\x00/g,qa=/[\x00&<>"']/,sa=function(a,b){return a<b?-1:a>b?1:0},ta=function(a){return n(a)[v](/\-([a-z])/g,function(a,c){return c.toUpperCase()})},ua=function(a,b){var c=L(b)?n(b)[v](/([-()\[\]{}+?*.$\^|,:#<!\\])/g,"\\$1")[v](/\x08/g,"\\x08"):"\\s";return a[v](new RegExp("(^"+(c?"|["+c+"]+":"")+")([a-z])","g"),function(a,b,c){return b+c.toUpperCase()})};var va=function(a,b){b.unshift(a);M[G](this,ja[H](null,b));b[t]()};ia(va,M);va[D].name="AssertionError";var N=function(a,b,c){if(!a){var d="Assertion failed";if(b)var d=d+(": "+b),f=k[D][u][G](arguments,2);throw new va(""+d,f||[]);}return a};var O=k[D],wa=O[y]?function(a,b,c){N(null!=a[C]);return O[y][G](a,b,c)}:function(a,b,c){c=null==c?0:0>c?Math.max(0,a[C]+c):c;if(L(a))return L(b)&&1==b[C]?a[y](b,c):-1;for(;c<a[C];c++)if(c in a&&a[c]===b)return c;return-1},xa=O.forEach?function(a,b,c){N(null!=a[C]);O.forEach[G](a,b,c)}:function(a,b,c){for(var d=a[C],f=L(a)?a[E](""):a,e=0;e<d;e++)e in f&&b[G](c,f[e],e,a)},ya=function(a){var b=a[C];if(0<b){for(var c=k(b),d=0;d<b;d++)c[d]=a[d];return c}return[]},za=function(a,b,c){N(null!=a[C]);return 2>=
+arguments[C]?O[u][G](a,b):O[u][G](a,b,c)};var Aa=function(a,b,c){for(var d in a)b[G](c,a[d],d,a)},Ba="constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" "),Ca=function(a,b){for(var c,d,f=1;f<arguments[C];f++){d=arguments[f];for(c in d)a[c]=d[c];for(var e=0;e<Ba[C];e++)c=Ba[e],Object[D].hasOwnProperty[G](d,c)&&(a[c]=d[c])}};var P;t:{var Da=J.navigator;if(Da){var Ea=Da.userAgent;if(Ea){P=Ea;break t}}P=""};var Fa=-1!=P[y]("Opera")||-1!=P[y]("OPR"),Q=-1!=P[y]("Trident")||-1!=P[y]("MSIE"),R=-1!=P[y]("Gecko")&&-1==P.toLowerCase()[y]("webkit")&&!(-1!=P[y]("Trident")||-1!=P[y]("MSIE")),S=-1!=P.toLowerCase()[y]("webkit"),Ga=function(){var a=J.document;return a?a.documentMode:void 0},Ha=function(){var a="",b;if(Fa&&J.opera)return a=J.opera.version,"function"==K(a)?a():a;R?b=/rv\:([^\);]+)(\)|;)/:Q?b=/\b(?:MSIE|rv)[: ]([^\);]+)(\)|;)/:S&&(b=/WebKit\/(\S+)/);b&&(a=(a=b.exec(P))?a[1]:"");return Q&&(b=Ga(),b>
+parseFloat(a))?n(b):a}(),Ia={},T=function(a){var b;if(!(b=Ia[a])){b=0;for(var c=n(Ha)[v](/^[\s\xa0]+|[\s\xa0]+$/g,"")[E]("."),d=n(a)[v](/^[\s\xa0]+|[\s\xa0]+$/g,"")[E]("."),f=Math.max(c[C],d[C]),e=0;0==b&&e<f;e++){var h=c[e]||"",m=d[e]||"",eb=RegExp("(\\d*)(\\D*)","g"),fb=RegExp("(\\d*)(\\D*)","g");do{var V=eb.exec(h)||["","",""],W=fb.exec(m)||["","",""];if(0==V[0][C]&&0==W[0][C])break;b=sa(0==V[1][C]?0:aa(V[1],10),0==W[1][C]?0:aa(W[1],10))||sa(0==V[2][C],0==W[2][C])||sa(V[2],W[2])}while(0==b)}b=
+Ia[a]=0<=b}return b},Ja=J.document,Ka=Ja&&Q?Ga()||("CSS1Compat"==Ja.compatMode?aa(Ha,10):5):void 0;var La=!Q||Q&&9<=Ka;!R&&!Q||Q&&Q&&9<=Ka||R&&T("1.9.1");Q&&T("9");var Ma=function(a,b){var c;c=a.className;c=L(c)&&c.match(/\S+/g)||[];for(var d=za(arguments,1),f=c[C]+d[C],e=c,h=0;h<d[C];h++)0<=wa(e,d[h])||e[s](d[h]);a.className=c.join(" ");return c[C]==f};var U=function(a,b){return L(b)?a.getElementById(b):b},Na=function(a,b,c,d){a=d||a;var f=b&&"*"!=b?b.toUpperCase():"";if(a.querySelectorAll&&a.querySelector&&(f||c))return a.querySelectorAll(f+(c?"."+c:""));if(c&&a.getElementsByClassName){b=a.getElementsByClassName(c);if(f){a={};for(var e=d=0,h;h=b[e];e++)f==h.nodeName&&(a[d++]=h);a.length=d;return a}return b}b=a.getElementsByTagName(f||"*");if(c){a={};for(e=d=0;h=b[e];e++){var f=h.className,m;if(m="function"==typeof f[E])m=0<=wa(f[E](/\s+/),c);m&&
+(a[d++]=h)}a.length=d;return a}return b},Pa=function(a,b){Aa(b,function(b,d){"style"==d?a[F].cssText=b:"class"==d?a.className=b:"for"==d?a.htmlFor=b:d in Oa?a.setAttribute(Oa[d],b):0==d.lastIndexOf("aria-",0)||0==d.lastIndexOf("data-",0)?a.setAttribute(d,b):a[d]=b})},Oa={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",frameborder:"frameBorder",height:"height",maxlength:"maxLength",role:"role",rowspan:"rowSpan",type:"type",usemap:"useMap",valign:"vAlign",width:"width"},Ra=function(a,
+b,c){var d=arguments,f=d[0],e=d[1];if(!La&&e&&(e[ba]||e[A])){f=["<",f];e[ba]&&f[s](' name="',ra(e[ba]),'"');if(e[A]){f[s](' type="',ra(e[A]),'"');var h={};Ca(h,e);delete h[A];e=h}f[s](">");f=f.join("")}f=g.createElement(f);e&&(L(e)?f.className=e:"array"==K(e)?Ma[H](null,[f].concat(e)):Pa(f,e));2<d[C]&&Qa(g,f,d,2);return f},Qa=function(a,b,c,d){function f(c){c&&b.appendChild(L(c)?a.createTextNode(c):c)}for(;d<c[C];d++){var e=c[d];if(!fa(e)||ga(e)&&0<e.nodeType)f(e);else{var h;t:{if(e&&"number"==typeof e[C]){if(ga(e)){h=
+"function"==typeof e.item||"string"==typeof e.item;break t}if("function"==K(e)){h="function"==typeof e.item;break t}}h=!1}xa(h?ya(e):e,f)}}};var Sa=function(a){var b=a[A];if(void 0===b)return null;switch(b.toLowerCase()){case "checkbox":case "radio":return a[da]?a[w]:null;case "select-one":return b=a.selectedIndex,0<=b?a.options[b][w]:null;case "select-multiple":for(var b=[],c,d=0;c=a.options[d];d++)c.selected&&b[s](c[w]);return b[C]?b:null;default:return void 0!==a[w]?a[w]:null}};var Ta=function(a){Ta[" "](a);return a};Ta[" "]=function(){};var Ua=!Q||Q&&9<=Ka,Va=Q&&!T("9");!S||T("528");R&&T("1.9b")||Q&&T("8")||Fa&&T("9.5")||S&&T("528");R&&!T("8")||Q&&T("9");var Wa=function(a,b){this.type=a;this.target=b;p(this,this[ea]);this.defaultPrevented=this.o=!1};Wa[D].preventDefault=function(){this.defaultPrevented=!0};var X=function(a,b){Wa[G](this,a?a[A]:"");this.target=null;p(this,null);this.relatedTarget=null;this.button=this.screenY=this.screenX=this.clientY=this.clientX=this.offsetY=this.offsetX=0;q(this,0);this.charCode=0;this.metaKey=this.shiftKey=this.altKey=this.ctrlKey=!1;this.p=this.state=null;a&&this.u(a,b)};ia(X,Wa);
+X[D].u=function(a,b){var c=this.type=a[A];this.target=a[ea]||a.srcElement;p(this,b);var d=a.relatedTarget;if(d){if(R){var f;t:{try{Ta(d.nodeName);f=!0;break t}catch(e){}f=!1}f||(d=null)}}else"mouseover"==c?d=a.fromElement:"mouseout"==c&&(d=a.toElement);this.relatedTarget=d;this.offsetX=S||void 0!==a.offsetX?a.offsetX:a.layerX;this.offsetY=S||void 0!==a.offsetY?a.offsetY:a.layerY;this.clientX=void 0!==a.clientX?a.clientX:a.pageX;this.clientY=void 0!==a.clientY?a.clientY:a.pageY;this.screenX=a.screenX||
+0;this.screenY=a.screenY||0;this.button=a.button;q(this,a[z]||0);this.charCode=a.charCode||("keypress"==c?a[z]:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.state=a.state;this.p=a;a.defaultPrevented&&this[x]()};X[D].preventDefault=function(){X.t[x][G](this);var a=this.p;if(a[x])a[x]();else if(a.returnValue=!1,Va)try{(a.ctrlKey||112<=a[z]&&123>=a[z])&&q(a,-1)}catch(b){}};var Xa="closure_listenable_"+(1E6*Math.random()|0),Ya=function(a){try{return!(!a||!a[Xa])}catch(b){return!1}},Za=0;var $a=function(a,b,c,d,f,e){this.c=a;this.g=b;this.src=c;this.type=d;this.k=!!f;this.j=e;this.key=++Za;this.e=this.l=!1};$a[D].n=function(){this.e=!0;this.j=this.src=this.g=this.c=null};var ab=function(a){this.src=a;this.a={};this.m=0};ab[D].add=function(a,b,c,d,f){var e=a[B]();a=this.a[e];a||(a=this.a[e]=[],this.m++);var h;t:{for(h=0;h<a[C];++h){var m=a[h];if(!m.e&&m.c==b&&m.k==!!d&&m.j==f)break t}h=-1}-1<h?(b=a[h],c||(b.l=!1)):(b=new $a(b,null,this.src,e,!!d,f),b.l=c,a[s](b));return b};ab[D].q=function(a){var b=a[A];if(!(b in this.a))return!1;var c=this.a[b],d=wa(c,a),f;if(f=0<=d)N(null!=c[C]),O.splice[G](c,d,1);f&&(a.n(),0==this.a[b][C]&&(delete this.a[b],this.m--));return f};var bb="closure_lm_"+(1E6*Math.random()|0),cb={},db=0,gb=function(a,b,c,d,f){if("array"==K(b)){for(var e=0;e<b[C];e++)gb(a,b[e],c,d,f);return null}c=hb(c);if(Ya(a))a=a.w(b,c,d,f);else{if(!b)throw l("Invalid event type");var e=!!d,h=ib(a);h||(a[bb]=h=new ab(a));c=h.add(b,c,!1,d,f);c.g||(d=jb(),c.g=d,d.src=a,d.c=c,a.addEventListener?a.addEventListener(b[B](),d,e):a.attachEvent(kb(b[B]()),d),db++);a=c}return a},jb=function(){var a=lb,b=Ua?function(c){return a[G](b.src,b.c,c)}:function(c){c=a[G](b.src,
+b.c,c);if(!c)return c};return b},kb=function(a){return a in cb?cb[a]:cb[a]="on"+a},nb=function(a,b,c,d){var f=1;if(a=ib(a))if(b=a.a[b[B]()])for(b=b.concat(),a=0;a<b[C];a++){var e=b[a];e&&e.k==c&&!e.e&&(f&=!1!==mb(e,d))}return Boolean(f)},mb=function(a,b){var c=a.c,d=a.j||a.src;if(a.l&&"number"!=typeof a&&a&&!a.e){var f=a.src;if(Ya(f))f.v(a);else{var e=a[A],h=a.g;f.removeEventListener?f.removeEventListener(e,h,a.k):f.detachEvent&&f.detachEvent(kb(e),h);db--;(e=ib(f))?(e.q(a),0==e.m&&(e.src=null,f[bb]=
+null)):a.n()}}return c[G](d,b)},lb=function(a,b){if(a.e)return!0;if(!Ua){var c;if(!(c=b))t:{c=["window","event"];for(var d=J,f;f=c[t]();)if(null!=d[f])d=d[f];else{c=null;break t}c=d}f=c;c=new X(f,this);d=!0;if(!(0>f[z]||void 0!=f.returnValue)){t:{var e=!1;if(0==f[z])try{q(f,-1);break t}catch(h){e=!0}if(e||void 0==f.returnValue)f.returnValue=!0}f=[];for(e=c.currentTarget;e;e=e.parentNode)f[s](e);for(var e=a[A],m=f[C]-1;!c.o&&0<=m;m--)p(c,f[m]),d&=nb(f[m],e,!0,c);for(m=0;!c.o&&m<f[C];m++)p(c,f[m]),
+d&=nb(f[m],e,!1,c)}return d}return mb(a,new X(b,this))},ib=function(a){a=a[bb];return a instanceof ab?a:null},ob="__closure_events_fn_"+(1E9*Math.random()>>>0),hb=function(a){N(a,"Listener can not be null.");if("function"==K(a))return a;N(a.handleEvent,"An object listener must have handleEvent method.");return a[ob]||(a[ob]=function(b){return a.handleEvent(b)})};var pb=function(a,b,c){t:if(c=ta(c),void 0===a[F][c]){var d=(S?"Webkit":R?"Moz":Q?"ms":Fa?"O":null)+ua(c);if(void 0!==a[F][d]){c=d;break t}}c&&(a[F][c]=b)};var qb=function(a,b){var c=[];1<arguments[C]&&(c=k[D][u][G](arguments)[u](1));var d=Na(g,"th","tct-selectall",a);if(0!=d[C]){var d=d[0],f=0,e=Na(g,"tbody",null,a);e[C]&&(f=e[0].rows[C]);this.d=Ra("input",{type:"checkbox"});d.appendChild(this.d);f?gb(this.d,"click",this.s,!1,this):r(this.d,!0);this.f=[];this.h=[];this.i=[];d=Na(g,"input",null,a);for(f=0;e=d[f];f++)"checkbox"==e[A]&&e!=this.d?(this.f[s](e),gb(e,"click",this.r,!1,this)):"action"==e[ba]&&(0<=c[y](e[w])?this.i[s](e):this.h[s](e),r(e,!0))}};
+I=qb[D];I.f=null;I.b=0;I.d=null;I.h=null;I.i=null;I.s=function(a){for(var b=a[ea][da],c=a=0,d;d=this.f[c];c++)d.checked=b,a+=1;this.b=b?this.f[C]:0;for(c=0;b=this.h[c];c++)r(b,!this.b);for(c=0;b=this.i[c];c++)r(b,1!=a?!0:!1)};I.r=function(a){this.b+=a[ea][da]?1:-1;this.d.checked=this.b==this.f[C];a=0;for(var b;b=this.h[a];a++)r(b,!this.b);for(a=0;b=this.i[a];a++)r(b,1!=this.b?!0:!1)};var rb=function(){var a=U(g,"kinds");a&&new qb(a);(a=U(g,"pending_backups"))&&new qb(a);(a=U(g,"backups"))&&new qb(a,"Restore");var b=U(g,"ae-datastore-admin-filesystem");b&&gb(b,"change",function(){var a="gs"==Sa(b);U(g,"gs_bucket_tr")[F].display=a?"":"none"});if(a=U(g,"confirm_delete_form")){var c=U(g,"confirm_readonly_delete");c&&(a.onsubmit=function(){var a=U(g,"confirm_message");L("color")?pb(a,"red","color"):Aa("color",ha(pb,a));return c[da]})}},Y=["ae","Datastore","Admin","init"],Z=J;
+Y[0]in Z||!Z.execScript||Z.execScript("var "+Y[0]);for(var $;Y[C]&&($=Y[t]());)Y[C]||void 0===rb?Z=Z[$]?Z[$]:Z[$]={}:Z[$]=rb;
diff --git a/google/appengine/ext/mapreduce/namespace_range.py b/google/appengine/ext/mapreduce/namespace_range.py
index b4be917..b2d2992 100644
--- a/google/appengine/ext/mapreduce/namespace_range.py
+++ b/google/appengine/ext/mapreduce/namespace_range.py
@@ -28,8 +28,6 @@
 
 
 
-
-
 """Represents a lexographic range of namespaces."""
 
 
@@ -68,6 +66,7 @@
 
   global NAMESPACE_CHARACTERS
   global MAX_NAMESPACE_LENGTH
+
   global MAX_NAMESPACE
   global _LEX_DISTANCE
   global NAMESPACE_BATCH_SIZE
@@ -101,6 +100,7 @@
   for i in range(1, MAX_NAMESPACE_LENGTH):
     _LEX_DISTANCE.append(
         _LEX_DISTANCE[i-1] * len(NAMESPACE_CHARACTERS) + 1)
+
   del i
 _setup_constants()
 
@@ -122,7 +122,7 @@
 
   Args:
     n: A number representing the lexographical ordering of a namespace.
-
+    _max_length: The maximum namespace length.
   Returns:
     A string representing the nth namespace in lexographical order.
   """
@@ -191,6 +191,7 @@
                namespace_start=None,
                namespace_end=None,
                _app=None):
+
     """Initializes a NamespaceRange instance.
 
     Args:
@@ -359,6 +360,7 @@
             can_query=itertools.chain(itertools.repeat(True, 50),
                                       itertools.repeat(False)).next,
             _app=None):
+
     """Splits the complete NamespaceRange into n equally-sized NamespaceRanges.
 
     Args:
@@ -384,15 +386,32 @@
     if n < 1:
       raise ValueError('n must be >= 1')
 
-    ns_range = NamespaceRange(_app=_app)
+    ranges = None
     if can_query():
-      ns_range = ns_range.normalized_start()
-      if ns_range is None:
-        if contiguous:
-          return [NamespaceRange(_app=_app)]
-        else:
+      if not contiguous:
+        ns_keys = get_namespace_keys(_app, n + 1)
+        if not ns_keys:
           return []
-    ranges = [ns_range]
+        else:
+          if len(ns_keys) <= n:
+
+
+            ns_range = []
+            for ns_key in ns_keys:
+              ns_range.append(NamespaceRange(ns_key.name() or '',
+                                             ns_key.name() or '',
+                                             _app=_app))
+            return sorted(ns_range,
+                          key=lambda ns_range: ns_range.namespace_start)
+
+          ranges = [NamespaceRange(ns_keys[0].name() or '', _app=_app)]
+      else:
+        ns_range = NamespaceRange(_app=_app).normalized_start()
+        if ns_range is None:
+          return [NamespaceRange(_app=_app)]
+        ranges = [ns_range]
+    else:
+      ranges = [NamespaceRange(_app=_app)]
 
     singles = []
     while ranges and (len(ranges) + len(singles)) < n:
@@ -452,4 +471,4 @@
 def get_namespace_keys(app, limit):
   """Get namespace keys."""
   ns_query = datastore.Query('__namespace__', keys_only=True, _app=app)
-  return list(ns_query.Run(limit=limit))
+  return list(ns_query.Run(limit=limit, batch_size=limit))
diff --git a/google/appengine/tools/app_engine_web_xml_parser.py b/google/appengine/tools/app_engine_web_xml_parser.py
index 3398896..edcee56 100644
--- a/google/appengine/tools/app_engine_web_xml_parser.py
+++ b/google/appengine/tools/app_engine_web_xml_parser.py
@@ -165,10 +165,7 @@
     for sub_node in xml_parser_utils.GetNodes(node, 'include'):
       path = xml_parser_utils.GetAttribute(sub_node, 'path').strip()
       expiration = xml_parser_utils.GetAttribute(sub_node, 'expiration').strip()
-      static_file_include = StaticFileInclude()
-      static_file_include.pattern = path
-      static_file_include.expiration = expiration
-      static_file_include.http_headers = {}
+      static_file_include = StaticFileInclude(path, expiration, {})
 
       for http_header_node in xml_parser_utils.GetNodes(
           sub_node, 'http-header'):
@@ -556,4 +553,8 @@
 
 class StaticFileInclude(ValueMixin):
   """Instances describe static files to be included in app configuration."""
-  pass
+
+  def __init__(self, pattern, expiration, http_headers):
+    self.pattern = pattern
+    self.expiration = expiration
+    self.http_headers = http_headers
diff --git a/google/appengine/tools/dev-channel-js.js b/google/appengine/tools/dev-channel-js.js
index fdce34e..060c245 100644
--- a/google/appengine/tools/dev-channel-js.js
+++ b/google/appengine/tools/dev-channel-js.js
@@ -23,7 +23,7 @@
 };
 goog.setTestOnly = function(opt_message) {
   if (!goog.DEBUG) {
-    throw opt_message = opt_message || "", Error("Importing test-only code into non-debug environment" + opt_message ? ": " + opt_message : ".");
+    throw opt_message = opt_message || "", Error("Importing test-only code into non-debug environment" + (opt_message ? ": " + opt_message : "."));
   }
 };
 goog.forwardDeclare = function() {
@@ -385,6 +385,40 @@
 }, Function.prototype.mixin = function(source) {
   goog.mixin(this.prototype, source);
 });
+goog.defineClass = function(superClass, def) {
+  var constructor = def.constructor, statics = def.statics;
+  if (!constructor || constructor == Object.prototype.constructor) {
+    throw Error("constructor property is required.");
+  }
+  var cls = goog.defineClass.createSealingConstructor_(constructor);
+  superClass && goog.inherits(cls, superClass);
+  delete def.constructor;
+  delete def.statics;
+  goog.defineClass.applyProperties_(cls.prototype, def);
+  null != statics && (statics instanceof Function ? statics(cls) : goog.defineClass.applyProperties_(cls, statics));
+  return cls;
+};
+goog.defineClass.SEAL_CLASS_INSTANCES = goog.DEBUG;
+goog.defineClass.createSealingConstructor_ = function(ctr) {
+  if (goog.defineClass.SEAL_CLASS_INSTANCES && Object.seal instanceof Function) {
+    var wrappedCtr = function() {
+      var instance = ctr.apply(this, arguments) || this;
+      this.constructor === wrappedCtr && Object.seal(instance);
+      return instance;
+    };
+    return wrappedCtr;
+  }
+  return ctr;
+};
+goog.defineClass.OBJECT_PROTOTYPE_FIELDS_ = "constructor hasOwnProperty isPrototypeOf propertyIsEnumerable toLocaleString toString valueOf".split(" ");
+goog.defineClass.applyProperties_ = function(target, source) {
+  for (var key in source) {
+    Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
+  }
+  for (var i = 0;i < goog.defineClass.OBJECT_PROTOTYPE_FIELDS_.length;i++) {
+    key = goog.defineClass.OBJECT_PROTOTYPE_FIELDS_[i], Object.prototype.hasOwnProperty.call(source, key) && (target[key] = source[key]);
+  }
+};
 goog.debug = {};
 goog.debug.Error = function(opt_msg) {
   if (Error.captureStackTrace) {
@@ -400,6 +434,7 @@
 goog.dom = {};
 goog.dom.NodeType = {ELEMENT:1, ATTRIBUTE:2, TEXT:3, CDATA_SECTION:4, ENTITY_REFERENCE:5, ENTITY:6, PROCESSING_INSTRUCTION:7, COMMENT:8, DOCUMENT:9, DOCUMENT_TYPE:10, DOCUMENT_FRAGMENT:11, NOTATION:12};
 goog.string = {};
+goog.string.DETECT_DOUBLE_ESCAPING = !1;
 goog.string.Unicode = {NBSP:"\u00a0"};
 goog.string.startsWith = function(str, prefix) {
   return 0 == str.lastIndexOf(prefix, 0);
@@ -515,24 +550,29 @@
 };
 goog.string.htmlEscape = function(str, opt_isLikelyToContainHtmlChars) {
   if (opt_isLikelyToContainHtmlChars) {
-    return str.replace(goog.string.amperRe_, "&amp;").replace(goog.string.ltRe_, "&lt;").replace(goog.string.gtRe_, "&gt;").replace(goog.string.quotRe_, "&quot;").replace(goog.string.singleQuoteRe_, "&#39;");
+    str = str.replace(goog.string.AMP_RE_, "&amp;").replace(goog.string.LT_RE_, "&lt;").replace(goog.string.GT_RE_, "&gt;").replace(goog.string.QUOT_RE_, "&quot;").replace(goog.string.SINGLE_QUOTE_RE_, "&#39;").replace(goog.string.NULL_RE_, "&#0;"), goog.string.DETECT_DOUBLE_ESCAPING && (str = str.replace(goog.string.E_RE_, "&#101;"));
+  } else {
+    if (!goog.string.ALL_RE_.test(str)) {
+      return str;
+    }
+    -1 != str.indexOf("&") && (str = str.replace(goog.string.AMP_RE_, "&amp;"));
+    -1 != str.indexOf("<") && (str = str.replace(goog.string.LT_RE_, "&lt;"));
+    -1 != str.indexOf(">") && (str = str.replace(goog.string.GT_RE_, "&gt;"));
+    -1 != str.indexOf('"') && (str = str.replace(goog.string.QUOT_RE_, "&quot;"));
+    -1 != str.indexOf("'") && (str = str.replace(goog.string.SINGLE_QUOTE_RE_, "&#39;"));
+    -1 != str.indexOf("\x00") && (str = str.replace(goog.string.NULL_RE_, "&#0;"));
+    goog.string.DETECT_DOUBLE_ESCAPING && -1 != str.indexOf("e") && (str = str.replace(goog.string.E_RE_, "&#101;"));
   }
-  if (!goog.string.allRe_.test(str)) {
-    return str;
-  }
-  -1 != str.indexOf("&") && (str = str.replace(goog.string.amperRe_, "&amp;"));
-  -1 != str.indexOf("<") && (str = str.replace(goog.string.ltRe_, "&lt;"));
-  -1 != str.indexOf(">") && (str = str.replace(goog.string.gtRe_, "&gt;"));
-  -1 != str.indexOf('"') && (str = str.replace(goog.string.quotRe_, "&quot;"));
-  -1 != str.indexOf("'") && (str = str.replace(goog.string.singleQuoteRe_, "&#39;"));
   return str;
 };
-goog.string.amperRe_ = /&/g;
-goog.string.ltRe_ = /</g;
-goog.string.gtRe_ = />/g;
-goog.string.quotRe_ = /"/g;
-goog.string.singleQuoteRe_ = /'/g;
-goog.string.allRe_ = /[&<>"']/;
+goog.string.AMP_RE_ = /&/g;
+goog.string.LT_RE_ = /</g;
+goog.string.GT_RE_ = />/g;
+goog.string.QUOT_RE_ = /"/g;
+goog.string.SINGLE_QUOTE_RE_ = /'/g;
+goog.string.NULL_RE_ = /\x00/g;
+goog.string.E_RE_ = /e/g;
+goog.string.ALL_RE_ = goog.string.DETECT_DOUBLE_ESCAPING ? /[\x00&<>"'e]/ : /[\x00&<>"']/;
 goog.string.unescapeEntities = function(str) {
   return goog.string.contains(str, "&") ? "document" in goog.global ? goog.string.unescapeEntitiesUsingDom_(str) : goog.string.unescapePureXmlEntities_(str) : str;
 };
@@ -541,7 +581,7 @@
 };
 goog.string.unescapeEntitiesUsingDom_ = function(str, opt_document) {
   var seen = {"&amp;":"&", "&lt;":"<", "&gt;":">", "&quot;":'"'}, div;
-  div = opt_document ? opt_document.createElement("div") : document.createElement("div");
+  div = opt_document ? opt_document.createElement("div") : goog.global.document.createElement("div");
   return str.replace(goog.string.HTML_ENTITY_PATTERN_, function(s, entity) {
     var value = seen[s];
     if (value) {
@@ -581,6 +621,9 @@
 goog.string.whitespaceEscape = function(str, opt_xml) {
   return goog.string.newLineToBr(str.replace(/  /g, " &#160;"), opt_xml);
 };
+goog.string.preserveSpaces = function(str) {
+  return str.replace(/(^|[\n ]) /g, "$1" + goog.string.Unicode.NBSP);
+};
 goog.string.stripQuotes = function(str, quoteChars) {
   for (var length = quoteChars.length, i = 0;i < length;i++) {
     var quoteChar = 1 == length ? quoteChars : quoteChars.charAt(i);
@@ -2143,7 +2186,11 @@
 goog.dom.getDocumentHeight_ = function(win) {
   var doc = win.document, height = 0;
   if (doc) {
-    var vh = goog.dom.getViewportSize_(win).height, body = doc.body, docEl = doc.documentElement;
+    var body = doc.body, docEl = doc.documentElement;
+    if (!body && !docEl) {
+      return 0;
+    }
+    var vh = goog.dom.getViewportSize_(win).height;
     if (goog.dom.isCss1CompatMode_(doc) && docEl.scrollHeight) {
       height = docEl.scrollHeight != vh ? docEl.scrollHeight : docEl.offsetHeight;
     } else {
@@ -2701,7 +2748,7 @@
 };
 goog.dom.getPixelRatio = goog.functions.cacheReturnValue(function() {
   var win = goog.dom.getWindow(), isFirefoxMobile = goog.userAgent.GECKO && goog.userAgent.MOBILE;
-  return goog.isDef(win.devicePixelRatio) && !isFirefoxMobile ? win.devicePixelRatio : win.matchMedia ? goog.dom.matchesPixelRatio_(0.75) || goog.dom.matchesPixelRatio_(1.5) || goog.dom.matchesPixelRatio_(2) || goog.dom.matchesPixelRatio_(3) || 1 : 1;
+  return goog.isDef(win.devicePixelRatio) && !isFirefoxMobile ? win.devicePixelRatio : win.matchMedia ? goog.dom.matchesPixelRatio_(.75) || goog.dom.matchesPixelRatio_(1.5) || goog.dom.matchesPixelRatio_(2) || goog.dom.matchesPixelRatio_(3) || 1 : 1;
 });
 goog.dom.matchesPixelRatio_ = function(pixelRatio) {
   var win = goog.dom.getWindow(), query = "(-webkit-min-device-pixel-ratio: " + pixelRatio + "),(min--moz-device-pixel-ratio: " + pixelRatio + "),(min-resolution: " + pixelRatio + "dppx)";
@@ -3262,7 +3309,7 @@
   var count = 0, typeStr = opt_type && opt_type.toString(), type;
   for (type in listenerMap.listeners) {
     if (!typeStr || type == typeStr) {
-      for (var listeners = goog.array.clone(listenerMap.listeners[type]), i = 0;i < listeners.length;++i) {
+      for (var listeners = listenerMap.listeners[type].concat(), i = 0;i < listeners.length;++i) {
         goog.events.unlistenByKey(listeners[i]) && ++count;
       }
     }
@@ -3319,7 +3366,7 @@
   if (listenerMap) {
     var listenerArray = listenerMap.listeners[type.toString()];
     if (listenerArray) {
-      for (var listenerArray = goog.array.clone(listenerArray), i = 0;i < listenerArray.length;i++) {
+      for (var listenerArray = listenerArray.concat(), i = 0;i < listenerArray.length;i++) {
         var listener = listenerArray[i];
         listener && listener.capture == capture && !listener.removed && (retval &= !1 !== goog.events.fireListener(listener, eventObject));
       }
@@ -3462,7 +3509,7 @@
   if (!listenerArray) {
     return!0;
   }
-  for (var listenerArray = goog.array.clone(listenerArray), rv = !0, i = 0;i < listenerArray.length;++i) {
+  for (var listenerArray = listenerArray.concat(), rv = !0, i = 0;i < listenerArray.length;++i) {
     var listener = listenerArray[i];
     if (listener && !listener.removed && listener.capture == capture) {
       var listenerFn = listener.listener, listenerHandler = listener.handler || listener.src;
@@ -4695,6 +4742,7 @@
   this.name_ = name;
   this.handlers_ = this.children_ = this.level_ = this.parent_ = null;
 };
+goog.debug.Logger.ROOT_LOGGER_NAME = "";
 goog.debug.Logger.ENABLE_HIERARCHY = !0;
 goog.debug.Logger.ENABLE_HIERARCHY || (goog.debug.Logger.rootHandlers_ = []);
 goog.debug.Logger.Level = function(name, value) {
@@ -4837,7 +4885,7 @@
 goog.debug.LogManager.loggers_ = {};
 goog.debug.LogManager.rootLogger_ = null;
 goog.debug.LogManager.initialize = function() {
-  goog.debug.LogManager.rootLogger_ || (goog.debug.LogManager.rootLogger_ = new goog.debug.Logger(""), goog.debug.LogManager.loggers_[""] = goog.debug.LogManager.rootLogger_, goog.debug.LogManager.rootLogger_.setLevel(goog.debug.Logger.Level.CONFIG));
+  goog.debug.LogManager.rootLogger_ || (goog.debug.LogManager.rootLogger_ = new goog.debug.Logger(goog.debug.Logger.ROOT_LOGGER_NAME), goog.debug.LogManager.loggers_[goog.debug.Logger.ROOT_LOGGER_NAME] = goog.debug.LogManager.rootLogger_, goog.debug.LogManager.rootLogger_.setLevel(goog.debug.Logger.Level.CONFIG));
 };
 goog.debug.LogManager.getLoggers = function() {
   return goog.debug.LogManager.loggers_;
@@ -4868,6 +4916,7 @@
 };
 goog.log = {};
 goog.log.ENABLED = goog.debug.LOGGING_ENABLED;
+goog.log.ROOT_LOGGER_NAME = goog.debug.Logger.ROOT_LOGGER_NAME;
 goog.log.Logger = goog.debug.Logger;
 goog.log.Level = goog.debug.Logger.Level;
 goog.log.LogRecord = goog.debug.LogRecord;
@@ -4911,7 +4960,7 @@
 goog.Timer.MAX_TIMEOUT_ = 2147483647;
 goog.Timer.prototype.enabled = !1;
 goog.Timer.defaultTimerObject = goog.global;
-goog.Timer.intervalScale = 0.8;
+goog.Timer.intervalScale = .8;
 goog.Timer.prototype.timer_ = null;
 goog.Timer.prototype.setInterval = function(interval) {
   this.interval_ = interval;
diff --git a/google/appengine/tools/devappserver2/application_configuration.py b/google/appengine/tools/devappserver2/application_configuration.py
index 92469d8..be4e2a5 100644
--- a/google/appengine/tools/devappserver2/application_configuration.py
+++ b/google/appengine/tools/devappserver2/application_configuration.py
@@ -336,15 +336,13 @@
     web_xml_path = os.path.join(web_inf_dir, 'web.xml')
     with open(web_xml_path) as f:
       web_xml_str = f.read()
-    static_files = []
-    # TODO: need to enumerate static files here
     has_jsps = False
     for _, _, filenames in os.walk(self.application_root):
       if any(f.endswith('.jsp') for f in filenames):
         has_jsps = True
         break
-    app_yaml_str = yaml_translator.TranslateXmlToYaml(
-        app_engine_web_xml_str, web_xml_str, static_files, has_jsps)
+    app_yaml_str = yaml_translator.TranslateXmlToYamlForDevAppServer(
+        app_engine_web_xml_str, web_xml_str, has_jsps, self.application_root)
     config = appinfo.LoadSingleAppInfo(app_yaml_str)
     return config, [app_engine_web_xml_path, web_xml_path]
 
diff --git a/google/appengine/tools/devappserver2/runtime_config_pb2.py b/google/appengine/tools/devappserver2/runtime_config_pb2.py
index 59a6ec9..7c5b825 100644
--- a/google/appengine/tools/devappserver2/runtime_config_pb2.py
+++ b/google/appengine/tools/devappserver2/runtime_config_pb2.py
@@ -22,9 +22,12 @@
 from google.net.proto2.python.public import descriptor as _descriptor
 from google.net.proto2.python.public import message as _message
 from google.net.proto2.python.public import reflection as _reflection
+from google.net.proto2.python.public import symbol_database as _symbol_database
 from google.net.proto2.proto import descriptor_pb2
 # @@protoc_insertion_point(imports)
 
+_sym_db = _symbol_database.Default()
+
 
 
 
@@ -33,6 +36,7 @@
   package='apphosting.tools.devappserver2',
   serialized_pb=_b('\n3apphosting/tools/devappserver2/runtime_config.proto\x12\x1e\x61pphosting.tools.devappserver2\"\xaf\x05\n\x06\x43onfig\x12\x0e\n\x06\x61pp_id\x18\x01 \x02(\x0c\x12\x12\n\nversion_id\x18\x02 \x02(\x0c\x12\x18\n\x10\x61pplication_root\x18\x03 \x02(\x0c\x12\x19\n\nthreadsafe\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x08\x61pi_host\x18\x11 \x01(\t:\tlocalhost\x12\x10\n\x08\x61pi_port\x18\x05 \x02(\x05\x12:\n\tlibraries\x18\x06 \x03(\x0b\x32\'.apphosting.tools.devappserver2.Library\x12\x16\n\nskip_files\x18\x07 \x01(\t:\x02^$\x12\x18\n\x0cstatic_files\x18\x08 \x01(\t:\x02^$\x12\x43\n\rpython_config\x18\x0e \x01(\x0b\x32,.apphosting.tools.devappserver2.PythonConfig\x12=\n\nphp_config\x18\t \x01(\x0b\x32).apphosting.tools.devappserver2.PhpConfig\x12\x38\n\x07\x65nviron\x18\n \x03(\x0b\x32\'.apphosting.tools.devappserver2.Environ\x12\x42\n\x10\x63loud_sql_config\x18\x0b \x01(\x0b\x32(.apphosting.tools.devappserver2.CloudSQL\x12\x12\n\ndatacenter\x18\x0c \x02(\t\x12\x13\n\x0binstance_id\x18\r \x02(\t\x12\x1b\n\x10stderr_log_level\x18\x0f \x01(\x03:\x01\x31\x12\x13\n\x0b\x61uth_domain\x18\x10 \x02(\t\x12\x15\n\rmax_instances\x18\x12 \x01(\x05\x12;\n\tvm_config\x18\x13 \x01(\x0b\x32(.apphosting.tools.devappserver2.VMConfig\"A\n\tPhpConfig\x12\x1b\n\x13php_executable_path\x18\x01 \x01(\x0c\x12\x17\n\x0f\x65nable_debugger\x18\x03 \x02(\x08\"<\n\x0cPythonConfig\x12\x16\n\x0estartup_script\x18\x01 \x01(\t\x12\x14\n\x0cstartup_args\x18\x02 \x01(\t\"t\n\x08\x43loudSQL\x12\x12\n\nmysql_host\x18\x01 \x02(\t\x12\x12\n\nmysql_port\x18\x02 \x02(\x05\x12\x12\n\nmysql_user\x18\x03 \x02(\t\x12\x16\n\x0emysql_password\x18\x04 \x02(\t\x12\x14\n\x0cmysql_socket\x18\x05 \x01(\t\"(\n\x07Library\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 \x02(\t\"%\n\x07\x45nviron\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x02(\x0c\"%\n\x08VMConfig\x12\x19\n\x11\x64ocker_daemon_url\x18\x01 \x01(\tB2\n,com.google.appengine.tools.development.proto 
\x02P\x01')
 )
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
 
 
@@ -433,42 +437,49 @@
   __module__ = 'google.appengine.tools.devappserver2.runtime_config_pb2'
   # @@protoc_insertion_point(class_scope:apphosting.tools.devappserver2.Config)
   ))
+_sym_db.RegisterMessage(Config)
 
 PhpConfig = _reflection.GeneratedProtocolMessageType('PhpConfig', (_message.Message,), dict(
   DESCRIPTOR = _PHPCONFIG,
   __module__ = 'google.appengine.tools.devappserver2.runtime_config_pb2'
   # @@protoc_insertion_point(class_scope:apphosting.tools.devappserver2.PhpConfig)
   ))
+_sym_db.RegisterMessage(PhpConfig)
 
 PythonConfig = _reflection.GeneratedProtocolMessageType('PythonConfig', (_message.Message,), dict(
   DESCRIPTOR = _PYTHONCONFIG,
   __module__ = 'google.appengine.tools.devappserver2.runtime_config_pb2'
   # @@protoc_insertion_point(class_scope:apphosting.tools.devappserver2.PythonConfig)
   ))
+_sym_db.RegisterMessage(PythonConfig)
 
 CloudSQL = _reflection.GeneratedProtocolMessageType('CloudSQL', (_message.Message,), dict(
   DESCRIPTOR = _CLOUDSQL,
   __module__ = 'google.appengine.tools.devappserver2.runtime_config_pb2'
   # @@protoc_insertion_point(class_scope:apphosting.tools.devappserver2.CloudSQL)
   ))
+_sym_db.RegisterMessage(CloudSQL)
 
 Library = _reflection.GeneratedProtocolMessageType('Library', (_message.Message,), dict(
   DESCRIPTOR = _LIBRARY,
   __module__ = 'google.appengine.tools.devappserver2.runtime_config_pb2'
   # @@protoc_insertion_point(class_scope:apphosting.tools.devappserver2.Library)
   ))
+_sym_db.RegisterMessage(Library)
 
 Environ = _reflection.GeneratedProtocolMessageType('Environ', (_message.Message,), dict(
   DESCRIPTOR = _ENVIRON,
   __module__ = 'google.appengine.tools.devappserver2.runtime_config_pb2'
   # @@protoc_insertion_point(class_scope:apphosting.tools.devappserver2.Environ)
   ))
+_sym_db.RegisterMessage(Environ)
 
 VMConfig = _reflection.GeneratedProtocolMessageType('VMConfig', (_message.Message,), dict(
   DESCRIPTOR = _VMCONFIG,
   __module__ = 'google.appengine.tools.devappserver2.runtime_config_pb2'
   # @@protoc_insertion_point(class_scope:apphosting.tools.devappserver2.VMConfig)
   ))
+_sym_db.RegisterMessage(VMConfig)
 
 
 DESCRIPTOR.has_options = True
diff --git a/google/appengine/tools/devappserver2/vm_runtime_proxy.py b/google/appengine/tools/devappserver2/vm_runtime_proxy.py
index 724ee36..39be229 100644
--- a/google/appengine/tools/devappserver2/vm_runtime_proxy.py
+++ b/google/appengine/tools/devappserver2/vm_runtime_proxy.py
@@ -118,7 +118,9 @@
                 'GAE_MODULE_VERSION': self._module_configuration.major_version,
                 'GAE_MINOR_VERSION': self._module_configuration.minor_version,
                 'GAE_MODULE_INSTANCE': runtime_config.instance_id},
-            volumes={'/var/log/app_engine/app': '/var/log/app_engine/app:rw'},
+            volumes={
+                '/var/log/app_engine/app': {'bind': '/var/log/app_engine/app'}
+            },
             volumes_from=None))
 
     self._container.Start()
diff --git a/google/appengine/tools/docker/containers.py b/google/appengine/tools/docker/containers.py
index 0cd1347..1c80097 100644
--- a/google/appengine/tools/docker/containers.py
+++ b/google/appengine/tools/docker/containers.py
@@ -27,36 +27,79 @@
 Image is a result of "docker build path/to/Dockerfile" command.
 Container is a result of "docker run image_tag" command.
 ImageOptions and ContainerOptions allow to pass parameters to these commands.
+
+Versions 1.6 and 1.10 of docker remote API are supported.
 """
 
 from collections import namedtuple
 
 import logging
+import re
+
+import google
+import docker
 
 
-ImageOptions = namedtuple(
-    'ImageOptions', [
-        # If this is None, no build is needed. We will be looking for the
-        # existing image with this tag and raise an error if it does not exist.
-        'dockerfile_dir',
-        'tag',
-        'nocache'
-    ]
-)
-# TODO: add rm option
+_SUCCESSFUL_BUILD_PATTERN = re.compile(
+    r'{"stream":"Successfully built ([a-zA-Z0-9]{12})\\n"}')
 
 
-ContainerOptions = namedtuple(
-    'ContainerOptions', [
-        'image_opts',
-        'port',
-        'environment',
-        # TODO: use another container to forward logs to
-        # instead of mounting host directory.
-        'volumes',
-        'volumes_from'
-    ]
-)
+class ImageOptions(namedtuple('ImageOptionsT',
+                              ['dockerfile_dir', 'tag', 'nocache', 'rm'])):
+  """Options for building Docker Images."""
+
+  def __new__(cls, dockerfile_dir=None, tag=None, nocache=False, rm=False):
+    """This method is redefined to provide default values for namedtuple.
+
+    Args:
+      dockerfile_dir: str, Path to the directory with the Dockerfile. If it is
+          None, no build is needed. We will be looking for the existing image
+          with the specified tag and raise an error if it does not exist.
+      tag: str, Repository name (and optionally a tag) to be applied to the
+          image in case of successful build. If dockerfile_dir is None, tag
+          is used for lookup of an image.
+      nocache: boolean, True if cache should not be used when building the
+          image.
+      rm: boolean, True if intermediate images should be removed after a
+          successful build.
+
+    Returns:
+      ImageOptions object.
+    """
+    return super(ImageOptions, cls).__new__(
+        cls, dockerfile_dir=dockerfile_dir, tag=tag, nocache=nocache, rm=rm)
+
+
+class ContainerOptions(namedtuple('ContainerOptionsT',
+                                  ['image_opts', 'port', 'port_bindings',
+                                   'environment', 'volumes', 'volumes_from',
+                                   'name'])):
+  """Options for creating and running Docker Containers."""
+
+  def __new__(cls, image_opts=None, port=None, port_bindings=None,
+              environment=None, volumes=None, volumes_from=None, name=None):
+    """This method is redefined to provide default values for namedtuple.
+
+    Args:
+      image_opts: ImageOptions, properties of underlying Docker Image.
+      port: int, Primary port that the process inside of a container is
+          listening on. If this port is not part of the port bindings
+          specified, a default binding will be added for this port.
+      port_bindings: dict, Port bindings for exposing multiple ports. If the
+          only binding needed is the default binding of just one port this
+          can be None.
+      environment: dict, Environment variables.
+      volumes: dict,  Volumes to mount from the host system.
+      volumes_from: list, Volumes from the specified container(s).
+      name: str, Name of a container. Needed for data containers.
+
+    Returns:
+      ContainerOptions object.
+    """
+    return super(ContainerOptions, cls).__new__(
+        cls, image_opts=image_opts, port=port, port_bindings=port_bindings,
+        environment=environment, volumes=volumes, volumes_from=volumes_from,
+        name=name)
 
 
 class Error(Exception):
@@ -85,7 +128,7 @@
     """
     self._docker_client = docker_client
     self._image_opts = image_opts
-    self._image_id = None
+    self._id = None
 
   def Build(self):
     """Calls "docker build" if needed."""
@@ -98,7 +141,8 @@
   @property
   def id(self):
     """Returns 64 hexadecimal digit string identifying the image."""
-    return self._image_id
+    # Might also be a first 12-characters shortcut.
+    return self._id
 
   @property
   def tag(self):
@@ -143,20 +187,46 @@
     super(Image, self).__init__(docker_client, image_opts)
 
   def Build(self):
-    """Calls "docker build"."""
+    """Calls "docker build".
+
+    Raises:
+      ImageError: if the image could not be built.
+    """
     logging.info('Building image %s...', self.tag)
-    self._image_id, _ = self._docker_client.build(
+
+    build_res = self._docker_client.build(
         path=self._image_opts.dockerfile_dir,
         tag=self.tag,
         quiet=False, fileobj=None, nocache=self._image_opts.nocache,
-        rm=False, stream=False)
-    logging.info('Image %s built.', self.tag)
+        rm=self._image_opts.rm, stream=False)
+
+    if isinstance(build_res, tuple):
+      # Older API returns pair (image_id, warnings)
+      self._id = build_res[0]
+    else:
+      # Newer API returns stream_helper generator where last message is saying
+      # about the success of the build.
+      for x in build_res:
+        m = _SUCCESSFUL_BUILD_PATTERN.match(x)
+        if m:
+          self._id = m.group(1)
+          break
+
+    if self.id:
+      logging.info('Image %s built, id = %s', self.tag, self.id)
+    else:
+      # TODO: figure out the build error.
+      raise ImageError('There was a build error for the image %s.', self.tag)
 
   def Remove(self):
     """Calls "docker rmi"."""
-    if self._image_id:
-      self._docker_client.remove_image(self.id)
-      self._image_id = None
+    if self._id:
+      try:
+        self._docker_client.remove_image(self.id)
+      except docker.errors.APIError:
+        logging.warning('Image %s cannot be removed because it is tagged in '
+                        'multiple repositories. Use -f to remove it.', self.id)
+      self._id = None
 
 
 class PrebuiltImage(BaseImage):
@@ -180,7 +250,7 @@
 
     if not image_opts.tag:
       raise ImageError('PrebuiltImage must have tag specified to find '
-                       'image_id.')
+                       'image id.')
 
     super(PrebuiltImage, self).__init__(docker_client, image_opts)
 
@@ -198,11 +268,11 @@
       raise ImageError('Image with tag %s was not found', self.tag)
 
     # TODO: check if it's possible to have more than one image returned.
-    self._image_id = images[0]
+    self._id = images[0]
 
   def Remove(self):
     """Unassigns image_id only, does not remove the image as we don't own it."""
-    self._image_id = None
+    self._id = None
 
 
 def CreateImage(docker_client, image_opts):
@@ -235,8 +305,13 @@
     self._container_opts = container_opts
 
     self._image = CreateImage(docker_client, container_opts.image_opts)
-    self._container_id = None
+    self._id = None
+    self._host = None
     self._port = None
+    # Port bindings will be set to a dictionary mapping exposed ports
+    # to the interface they are bound to. This will be populated from
+    # the container options passed when the container is started.
+    self._port_bindings = None
 
   def Start(self):
     """Builds an image (if necessary) and runs a container.
@@ -245,45 +320,75 @@
       ContainerError: if container_id is already set, i.e. container is already
           started.
     """
-    if self._container_id:
+    if self.id:
       raise ContainerError('Trying to start already running container.')
 
     self._image.Build()
 
     logging.info('Creating container...')
-    self._container_id = self._docker_client.create_container(
+    port_bindings = self._container_opts.port_bindings or {}
+    if self._container_opts.port:
+      # Add primary port to port bindings if not already specified.
+      # Setting its value to None lets docker pick any available port.
+      port_bindings[self._container_opts.port] = port_bindings.get(
+          self._container_opts.port)
+
+    self._id = self._docker_client.create_container(
         image=self._image.id, hostname=None, user=None, detach=True,
         stdin_open=False,
         tty=False, mem_limit=0,
-        ports=[self._container_opts.port],
-        volumes=self._container_opts.volumes.keys(),
+        ports=port_bindings.keys(),
+        volumes=(self._container_opts.volumes.keys()
+                 if self._container_opts.volumes else None),
         environment=self._container_opts.environment,
         dns=None,
-        network_disabled=False, name=None,
-        volumes_from=self._container_opts.volumes_from)
-    logging.info('Container %s created.', self._container_id)
+        network_disabled=False,
+        name=self.name)
+    logging.info('Container %s created.', self.id)
 
     self._docker_client.start(
-        self._container_id,
-        # Assigns a random available docker port
-        port_bindings={self._container_opts.port: None},
-        binds=self._container_opts.volumes)
+        self.id,
+        port_bindings=port_bindings,
+        binds=self._container_opts.volumes,
+        # In the newer API version volumes_from got moved from
+        # create_container to start. In older version volumes_from option was
+        # completely broken therefore we support only passing volumes_from
+        # in start.
+        volumes_from=self._container_opts.volumes_from)
 
-    container_info = self._docker_client.inspect_container(self._container_id)
+    if not port_bindings:
+      # Nothing to inspect
+      return
+
+    container_info = self._docker_client.inspect_container(self._id)
     network_settings = container_info['NetworkSettings']
     self._host = network_settings['IPAddress']
-    self._port = int(network_settings['Ports']
-                     ['%d/tcp' % self._container_opts.port][0]['HostPort'])
+    self._port_bindings = {
+        port: int(network_settings['Ports']['%d/tcp' % port][0]['HostPort'])
+        for port in port_bindings
+    }
 
   def Stop(self):
     """Stops a running container, removes it and underlying image if needed."""
-    if self._container_id:
-      self._docker_client.stop(self._container_id)
-      self._docker_client.remove_container(self._container_id, v=False,
+    if self._id:
+      self._docker_client.stop(self.id)
+      self._docker_client.remove_container(self.id, v=False,
                                            link=False)
-      self._container_id = None
+      self._id = None
       self._image.Remove()
 
+  def PortBinding(self, port):
+    """Get the host binding of a container port.
+
+    Args:
+      port: Port inside container.
+
+    Returns:
+      Port on the host system mapped to the given port inside of
+          the container.
+    """
+    return self._port_bindings.get(port)
+
   @property
   def host(self):
     """Host the container can be reached at by the host (i.e. client) system."""
@@ -293,7 +398,7 @@
   @property
   def port(self):
     """Port (on the host system) mapped to the port inside of the container."""
-    return self._port
+    return self._port_bindings[self._container_opts.port]
 
   @property
   def addr(self):
@@ -301,10 +406,20 @@
     return '%s:%d' % (self.host, self.port)
 
   @property
+  def id(self):
+    """Returns 64 hexadecimal digit string identifying the container."""
+    return self._id
+
+  @property
   def container_addr(self):
     """An address the container can be reached at by another container."""
     return '%s:%d' % (self._host, self._container_opts.port)
 
+  @property
+  def name(self):
+    """String, identifying a container. Required for data containers."""
+    return self._container_opts.name
+
   def __enter__(self):
     """Makes Container usable with "with" statement."""
     self.Start()
diff --git a/google/appengine/tools/handler_generator.py b/google/appengine/tools/handler_generator.py
index 947a7bf..dd23d18 100644
--- a/google/appengine/tools/handler_generator.py
+++ b/google/appengine/tools/handler_generator.py
@@ -38,14 +38,16 @@
 
 def GenerateYamlHandlersList(app_engine_web_xml, web_xml, static_files):
   """Produces a list of Yaml strings for dynamic and static handlers."""
+  welcome_properties = _MakeWelcomeProperties(web_xml, static_files)
   static_handler_generator = StaticHandlerGenerator(
-      app_engine_web_xml, web_xml, static_files)
+      app_engine_web_xml, web_xml, welcome_properties)
   dynamic_handler_generator = DynamicHandlerGenerator(
       app_engine_web_xml, web_xml)
 
-  if (len(static_handler_generator.GenerateOrderedHandlerList()) +
-      len(dynamic_handler_generator.GenerateOrderedHandlerList())
-      > MAX_HANDLERS):
+  handler_length = len(dynamic_handler_generator.GenerateOrderedHandlerList())
+  if static_files:
+    handler_length += len(static_handler_generator.GenerateOrderedHandlerList())
+  if handler_length > MAX_HANDLERS:
 
 
 
@@ -55,19 +57,74 @@
 
   yaml_statements = ['handlers:']
   if static_files:
-    static_handler_generator = StaticHandlerGenerator(
-        app_engine_web_xml, web_xml, static_files)
     yaml_statements += static_handler_generator.GetHandlerYaml()
   yaml_statements += dynamic_handler_generator.GetHandlerYaml()
 
   return yaml_statements
 
 
-def GenerateYamlHandlers(app_engine_web_xml, web_xml, static_files):
-  """Produces Yaml string writable to a file."""
-  handler_yaml = '\n'.join(
-      GenerateYamlHandlersList(app_engine_web_xml, web_xml, static_files))
-  return handler_yaml + '\n'
+def GenerateYamlHandlersListForDevAppServer(
+    app_engine_web_xml, web_xml, static_urls):
+  r"""Produces a list of Yaml strings for dynamic and static handlers.
+
+  This variant of GenerateYamlHandlersList is for the Dev App Server case.
+  The key difference there is that we serve files directly from the war
+  directory rather than constructing a parallel hierarchy with a special
+  __static__ directory. Since app.yaml doesn't support excluding URL patterns
+  and appengine-web.xml does, this means that we have to define patterns that
+  cover exactly the set of static files we want without pulling in any files
+  that are not supposed to be served as static files.
+
+  Args:
+    app_engine_web_xml: an app_engine_web_xml_parser.AppEngineWebXml object.
+    web_xml: a web_xml_parser.WebXml object.
+    static_urls: a list of two-item tuples where the first item is a URL pattern
+      string for a static file, such as '/stylesheets/main\.css', and the
+      second item is the app_engine_web_xml_parser.StaticFileInclude
+      representing the <static-files><include> XML element that caused that URL
+      pattern to be included in the list.
+
+  Returns:
+    A list of strings that together make up the lines of the generated app.yaml
+    file.
+  """
+  static_handler_generator = StaticHandlerGeneratorForDevAppServer(
+      app_engine_web_xml, web_xml, static_urls)
+  dynamic_handler_generator = DynamicHandlerGenerator(
+      app_engine_web_xml, web_xml)
+  return (['handlers:'] +
+          static_handler_generator.GetHandlerYaml() +
+          dynamic_handler_generator.GetHandlerYaml())
+
+
+def _MakeWelcomeProperties(web_xml, static_files):
+  """Makes the welcome_properties dict given web_xml and the static files.
+
+  Args:
+    web_xml: a parsed web.xml that may contain a <welcome-file-list> clause.
+    static_files: the list of all static files found in the app.
+
+  Returns:
+    A dict with a single entry where the key is 'welcome' and the value is
+    either None or a tuple of the file names in all the <welcome-file> clauses
+    that were retained.  A <welcome-file> clause is retained if its file name
+    matches at least one actual file in static_files.
+
+    For example, if the input looked like this:
+      <welcome-file-list>
+        <welcome-file>index.jsp</welcome-file>
+        <welcome-file>index.html</welcome-file>
+      </welcome-file-list>
+    and if there was a file /foo/bar/index.html but no file called index.jsp
+    anywhere in static_files, the result would be {'welcome': ('index.html',)}.
+  """
+  static_welcome_files = []
+  for welcome_file in web_xml.welcome_files:
+    if any(f.endswith('/' + welcome_file) for f in static_files):
+      static_welcome_files.append(welcome_file)
+
+  welcome_value = tuple(static_welcome_files) or None
+  return {'welcome': welcome_value}
 
 
 class HandlerGenerator(object):
@@ -223,22 +280,14 @@
 class StaticHandlerGenerator(HandlerGenerator):
   """Generates static handler yaml entries for app.yaml."""
 
-  def __init__(self, app_engine_web_xml, web_xml, static_files):
+  def __init__(self, app_engine_web_xml, web_xml, welcome_properties):
     super(StaticHandlerGenerator, self).__init__(app_engine_web_xml, web_xml)
-    self.static_files = static_files
-    static_welcome_files = []
-    for welcome_file in self.web_xml.welcome_files:
-      for static_file in static_files:
-        if static_file.endswith('/' + welcome_file):
-          static_welcome_files.append(welcome_file)
-          break
-
-    welcome_value = tuple(static_welcome_files) or None
-    self.welcome_properties = {'welcome': welcome_value}
+    self.static_file_includes = self.app_engine_web_xml.static_file_includes
+    self.welcome_properties = welcome_properties
 
   def MakeStaticFilePatternsIntoHandlers(self):
     """Creates SimpleHandlers out of XML-specified static file includes."""
-    includes = self.app_engine_web_xml.static_file_includes
+    includes = self.static_file_includes
     if not includes:
       return [handler.SimpleHandler('/*', {'type': 'static'})]
 
@@ -306,3 +355,58 @@
       statements.append('  http_headers:')
       statements += ['    %s: %s' % pair for pair in http_headers]
     return statements
+
+
+class StaticHandlerGeneratorForDevAppServer(StaticHandlerGenerator):
+  """Generates static handler yaml entries for app.yaml in Dev App Server.
+
+  This class overrides the GenerateOrderedHandlerList and TranslateHandler
+  methods from its parent to work with the Dev App Server environment.
+  See the GenerateYamlHandlersListForDevAppServer method above for further
+  details.
+  """
+
+  def __init__(self, app_engine_web_xml, web_xml, static_urls):
+    super(StaticHandlerGeneratorForDevAppServer, self).__init__(
+        app_engine_web_xml, web_xml, {})
+    self.static_urls = static_urls
+
+  def GenerateOrderedHandlerList(self):
+    handler_patterns = self.MakeStaticUrlsIntoHandlers()
+
+
+
+
+    return handler.GetOrderedIntersection(handler_patterns)
+
+  def MakeStaticUrlsIntoHandlers(self):
+    handler_patterns = []
+    for url, include in self.static_urls:
+      properties = {'type': 'static'}
+      if include.expiration:
+        properties['expiration'] = include.expiration
+      if include.http_headers:
+        properties['http_headers'] = tuple(sorted(include.http_headers.items()))
+      handler_patterns.append(handler.SimpleHandler(url, properties))
+    return handler_patterns
+
+  def TranslateHandler(self, h):
+    """Translates SimpleHandler to static handler yaml statements."""
+
+    root = self.app_engine_web_xml.public_root
+
+
+    regex = h.Regexify()
+
+
+
+    split = 1 if regex.startswith('/') else 0
+
+    statements = ['- url: /(%s)' % regex[split:],
+                  '  static_files: %s\\1' % root,
+                  '  upload: __NOT_USED__',
+                  '  require_matching_file: True']
+
+    return (statements +
+            self.TranslateAdditionalOptions(h) +
+            self.TranslateAdditionalStaticOptions(h))
diff --git a/google/appengine/tools/yaml_translator.py b/google/appengine/tools/yaml_translator.py
index 5e78f58..8fd45e0 100644
--- a/google/appengine/tools/yaml_translator.py
+++ b/google/appengine/tools/yaml_translator.py
@@ -21,6 +21,9 @@
   AppYamlTranslator: Class that facilitates xml-to-yaml translation
 """
 
+import os
+import re
+
 from google.appengine.tools import app_engine_web_xml_parser as aewxp
 from google.appengine.tools import handler_generator
 from google.appengine.tools import web_xml_parser
@@ -32,9 +35,7 @@
 
 def TranslateXmlToYaml(app_engine_web_xml_str,
                        web_xml_str,
-                       static_files,
-                       has_jsps,
-                       api_version='1.0'):
+                       has_jsps):
   """Does xml-string to yaml-string translation, given each separate file text.
 
   Processes each xml string into an object representing the xml,
@@ -43,9 +44,7 @@
   Args:
     app_engine_web_xml_str: text from app_engine_web.xml
     web_xml_str: text from web.xml
-    static_files: list of static files
     has_jsps: true if the app has any *.jsp files
-    api_version: current api version
 
   Returns:
     The full text of the app.yaml generated from the xml files.
@@ -57,8 +56,41 @@
   web_parser = web_xml_parser.WebXmlParser()
   app_engine_web_xml = aewx_parser.ProcessXml(app_engine_web_xml_str)
   web_xml = web_parser.ProcessXml(web_xml_str, has_jsps)
-  translator = AppYamlTranslator(
-      app_engine_web_xml, web_xml, static_files, api_version)
+  translator = AppYamlTranslator(app_engine_web_xml, web_xml, [], '1.0')
+  return translator.GetYaml()
+
+
+def TranslateXmlToYamlForDevAppServer(app_engine_web_xml_str,
+                                      web_xml_str,
+                                      has_jsps,
+                                      war_root):
+  """Does xml-string to yaml-string translation, given each separate file text.
+
+  Processes each xml string into an object representing the xml,
+  and passes these to the translator. This variant is used in the Dev App Server
+  context, where files are served directly from the input war directory, unlike
+  the appcfg case where they are copied or linked into a parallel hierarchy.
+  This means that there is no __static__ directory containing exactly the files
+  that are supposed to be served statically.
+
+  Args:
+    app_engine_web_xml_str: text from app_engine_web.xml
+    web_xml_str: text from web.xml
+    has_jsps: true if the app has any *.jsp files
+    war_root: the path to the root directory of the war hierarchy
+
+  Returns:
+    The full text of the app.yaml generated from the xml files.
+
+  Raises:
+    AppEngineConfigException: raised in processing stage for illegal XML.
+  """
+  aewx_parser = aewxp.AppEngineWebXmlParser()
+  web_parser = web_xml_parser.WebXmlParser()
+  app_engine_web_xml = aewx_parser.ProcessXml(app_engine_web_xml_str)
+  web_xml = web_parser.ProcessXml(web_xml_str, has_jsps)
+  translator = AppYamlTranslatorForDevAppServer(
+      app_engine_web_xml, web_xml, war_root)
   return translator.GetYaml()
 
 
@@ -234,23 +266,40 @@
       return []
     statements = ['error_handlers:']
     for error_handler in self.app_engine_web_xml.static_error_handlers:
-      name = error_handler.name
-      if not name.startswith('/'):
-        name = '/' + name
 
-      if ('__static__' + name) not in self.static_files:
-        raise AppEngineConfigException(
-            'No static file found for error handler: %s, out of %s' %
-            (name, self.static_files))
-      statements.append('- file: __static__%s' % name)
+      path = self.ErrorHandlerPath(error_handler)
+      statements.append('- file: %s' % path)
       if error_handler.code:
         statements.append('  error_code: %s' % error_handler.code)
-      mime_type = self.web_xml.GetMimeTypeForPath(name)
+      mime_type = self.web_xml.GetMimeTypeForPath(error_handler.name)
       if mime_type:
         statements.append('  mime_type: %s' % mime_type)
 
     return statements
 
+  def ErrorHandlerPath(self, error_handler):
+    """Returns the relative path name for the given error handler.
+
+    Args:
+      error_handler: an app_engine_web_xml.ErrorHandler.
+
+    Returns:
+      the relative path name for the handler.
+
+    Raises:
+      AppEngineConfigException: if the named file is not an existing static
+        file.
+    """
+    name = error_handler.name
+    if not name.startswith('/'):
+      name = '/' + name
+    path = '__static__' + name
+    if path not in self.static_files:
+      raise AppEngineConfigException(
+          'No static file found for error handler: %s, out of %s' %
+          (name, self.static_files))
+    return path
+
   def TranslateHandlers(self):
     return handler_generator.GenerateYamlHandlersList(
         self.app_engine_web_xml,
@@ -268,3 +317,155 @@
     if missing:
       raise AppEngineConfigException('Missing required fields: %s' %
                                      ', '.join(missing))
+
+
+def _XmlPatternToRegEx(xml_pattern):
+  r"""Translates an appengine-web.xml pattern into a regular expression.
+
+  Specifically, this applies to the patterns that appear in the <include> and
+  <exclude> elements inside <static-files>. They look like '/**.png' or
+  '/stylesheets/*.css', and are translated into expressions like
+  '^/.*\.png$' or '^/stylesheets/.*\.css$'.
+
+  Args:
+    xml_pattern: a string like '/**.png'
+
+  Returns:
+    a compiled regular expression like re.compile('^/.*\.png$').
+  """
+  result = ['^']
+  while xml_pattern:
+    if xml_pattern.startswith('**'):
+      result.append(r'.*')
+      xml_pattern = xml_pattern[1:]
+    elif xml_pattern.startswith('*'):
+      result.append(r'[^/]*')
+    elif xml_pattern.startswith('/'):
+
+
+      result.append('/')
+    else:
+      result.append(re.escape(xml_pattern[0]))
+    xml_pattern = xml_pattern[1:]
+  result.append('$')
+  return re.compile(''.join(result))
+
+
+class AppYamlTranslatorForDevAppServer(AppYamlTranslator):
+  """Subclass of AppYamlTranslator specialized for the Dev App Server case.
+
+  The key difference is that static files are served directly from the war
+  directory, which means that the app.yaml patterns we define must cover
+  exactly those files in that directory hierarchy that are supposed to be static
+  while not covering any files that are not supposed to be static.
+
+  Attributes:
+    war_root: the root directory of the war hierarchy.
+    static_urls: a list of two-item tuples where the first item is a URL that
+      should be served statically and the second item corresponds to the
+      <include> element that caused that URL to be included.
+  """
+
+  def __init__(self,
+               app_engine_web_xml,
+               web_xml,
+               war_root):
+    super(AppYamlTranslatorForDevAppServer, self).__init__(
+        app_engine_web_xml, web_xml, [], '1.0')
+    self.war_root = war_root
+    self.static_urls = self.IncludedStaticUrls()
+
+  def IncludedStaticUrls(self):
+    """Returns the URLs that should be resolved statically for this app.
+
+    The result includes a URL for every file in the war hierarchy that is
+    covered by one of the <include> elements for <static-files> and not covered
+    by any of the <exclude> elements.
+
+    Returns:
+      a list of two-item tuples where the first item is a URL that should be
+      served statically and the second item corresponds to the <include>
+      element that caused that URL to be included.
+    """
+
+
+
+
+
+
+    includes = self.app_engine_web_xml.static_file_includes
+    if not includes:
+
+
+
+      includes = [aewxp.StaticFileInclude('**', None, {})]
+    excludes = self.app_engine_web_xml.static_file_excludes
+    files = os.listdir(self.war_root)
+    web_inf_name = os.path.normcase('WEB-INF')
+    files = [f for f in files if os.path.normcase(f) != web_inf_name]
+    static_urls = []
+    includes_and_res = [(include, _XmlPatternToRegEx(include.pattern))
+                        for include in includes]
+    exclude_res = [_XmlPatternToRegEx(exclude) for exclude in excludes]
+    self.ComputeIncludedStaticUrls(
+        static_urls, self.war_root, '/', files, includes_and_res, exclude_res)
+    return static_urls
+
+
+
+
+
+  def ComputeIncludedStaticUrls(
+      self,
+      static_urls, dirpath, url_prefix, files, includes_and_res, exclude_res):
+    """Compute the URLs that should be resolved statically.
+
+    This recursive method is called for the war directory and every
+    subdirectory except the top-level WEB-INF directory. If we have arrived
+    at the directory <war-root>/foo/bar then dirpath will be <war-root>/foo/bar
+    and url_prefix will be /foo/bar.
+
+    Args:
+      static_urls: a list to be filled with the result, two-item tuples where
+        the first item is a URL and the second is a parsed <include> element.
+      dirpath: the path to the directory inside the war hierarchy that we have
+        reached at this point in the recursion.
+      url_prefix: the URL prefix that we have reached at this point in the
+        recursion.
+      files: the contents of the dirpath directory, minus the WEB-INF directory
+        if dirpath is the war directory itself.
+      includes_and_res: a list of two-item tuples where the first item is a
+        parsed <include> element and the second item is a compiled regular
+        expression corresponding to the path= pattern from that element.
+      exclude_res: a list of compiled regular expressions corresponding to the
+        path= patterns from <exclude> elements.
+    """
+    for f in files:
+      path = os.path.join(dirpath, f)
+      if os.path.isfile(path):
+        url = url_prefix + f
+        if not any(exclude_re.search(url) for exclude_re in exclude_res):
+          for include, include_re in includes_and_res:
+            if include_re.search(url):
+              static_urls.append((url, include))
+              break
+      else:
+        self.ComputeIncludedStaticUrls(
+            static_urls, path, url_prefix + f + '/', os.listdir(path),
+            includes_and_res, exclude_res)
+
+  def TranslateHandlers(self):
+    return handler_generator.GenerateYamlHandlersListForDevAppServer(
+        self.app_engine_web_xml,
+        self.web_xml,
+        self.static_urls)
+
+  def ErrorHandlerPath(self, error_handler):
+    name = error_handler.name
+    if name.startswith('/'):
+      name = name[1:]
+    if name not in self.static_files:
+      raise AppEngineConfigException(
+          'No static file found for error handler: %s, out of %s' %
+          (name, self.static_files))
+    return name
diff --git a/google/net/proto2/proto/descriptor_pb2.py b/google/net/proto2/proto/descriptor_pb2.py
index 9a51bb6..7ee6161 100644
--- a/google/net/proto2/proto/descriptor_pb2.py
+++ b/google/net/proto2/proto/descriptor_pb2.py
@@ -22,8 +22,11 @@
 from google.net.proto2.python.public import descriptor as _descriptor
 from google.net.proto2.python.public import message as _message
 from google.net.proto2.python.public import reflection as _reflection
+from google.net.proto2.python.public import symbol_database as _symbol_database
 
 
+_sym_db = _symbol_database.Default()
+
 
 
 
@@ -32,6 +35,7 @@
   package='proto2',
   serialized_pb=_b('\n!net/proto2/proto/descriptor.proto\x12\x06proto2\">\n\x11\x46ileDescriptorSet\x12)\n\x04\x66ile\x18\x01 \x03(\x0b\x32\x1b.proto2.FileDescriptorProto\"\x95\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12-\n\x0cmessage_type\x18\x04 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x05 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12/\n\x07service\x18\x06 \x03(\x0b\x32\x1e.proto2.ServiceDescriptorProto\x12/\n\textension\x18\x07 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12$\n\x07options\x18\x08 \x01(\x0b\x32\x13.proto2.FileOptions\x12\x30\n\x10source_code_info\x18\t \x01(\x0b\x32\x16.proto2.SourceCodeInfo\"\xa5\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05\x66ield\x18\x02 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12/\n\textension\x18\x06 \x03(\x0b\x32\x1c.proto2.FieldDescriptorProto\x12,\n\x0bnested_type\x18\x03 \x03(\x0b\x32\x17.proto2.DescriptorProto\x12.\n\tenum_type\x18\x04 \x03(\x0b\x32\x1b.proto2.EnumDescriptorProto\x12?\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32&.proto2.DescriptorProto.ExtensionRange\x12\x30\n\noneof_decl\x18\x08 \x03(\x0b\x32\x1c.proto2.OneofDescriptorProto\x12\'\n\x07options\x18\x07 \x01(\x0b\x32\x16.proto2.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\x8e\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x31\n\x05label\x18\x04 \x01(\x0e\x32\".proto2.FieldDescriptorProto.Label\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.proto2.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12%\n\x07options\x18\x08 
\x01(\x0b\x32\x14.proto2.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"z\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05value\x18\x02 \x03(\x0b\x32 .proto2.EnumValueDescriptorProto\x12$\n\x07options\x18\x03 \x01(\x0b\x32\x13.proto2.EnumOptions\"c\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12)\n\x07options\x18\x03 \x01(\x0b\x32\x18.proto2.EnumValueOptions\"\xad\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x06method\x18\x02 \x03(\x0b\x32\x1d.proto2.MethodDescriptorProto\x12-\n\x06stream\x18\x04 \x03(\x0b\x32\x1d.proto2.StreamDescriptorProto\x12\'\n\x07options\x18\x03 \x01(\x0b\x32\x16.proto2.ServiceOptions\"v\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.MethodOptions\"\x87\x01\n\x15StreamDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1b\n\x13\x63lient_message_type\x18\x02 \x01(\t\x12\x1b\n\x13server_message_type\x18\x03 \x01(\t\x12&\n\x07options\x18\x04 \x01(\x0b\x32\x15.proto2.StreamOptions\"\x89\n\n\x0b\x46ileOptions\x12\x19\n\x0e\x63\x63_api_version\x18\x02 
\x01(\x05:\x01\x32\x12V\n\x14\x63\x63_api_compatibility\x18\x0f \x01(\x0e\x32&.proto2.FileOptions.CompatibilityLevel:\x10NO_COMPATIBILITY\x12\'\n\x19\x63\x63_proto_array_compatible\x18\x16 \x01(\x08:\x04true\x12\"\n\x14\x63\x63_utf8_verification\x18\x18 \x01(\x08:\x04true\x12$\n\x15\x63\x63_proto1_text_format\x18\x19 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x19\n\x0epy_api_version\x18\x04 \x01(\x05:\x01\x32\x12\x1b\n\x10java_api_version\x18\x05 \x01(\x05:\x01\x32\x12!\n\x13java_use_javaproto2\x18\x06 \x01(\x08:\x04true\x12\x1e\n\x10java_java5_enums\x18\x07 \x01(\x08:\x04true\x12)\n\x1ajava_generate_rpc_baseimpl\x18\r \x01(\x08:\x05\x66\x61lse\x12#\n\x14java_use_javastrings\x18\x15 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_alt_api_package\x18\x13 \x01(\t\x12\x34\n%java_enable_dual_generate_mutable_api\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10java_mutable_api\x18\x1c \x01(\x08:\x05\x66\x61lse\x12+\n#java_multiple_files_mutable_package\x18\x1d \x01(\t\x12=\n\x0coptimize_for\x18\t \x01(\x0e\x32 .proto2.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\x1a\n\x12javascript_package\x18\x0c \x01(\t\x12\x1a\n\x0fszl_api_version\x18\x0e \x01(\x05:\x01\x31\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"c\n\x12\x43ompatibilityLevel\x12\x14\n\x10NO_COMPATIBILITY\x10\x00\x12\x15\n\x11PROTO1_COMPATIBLE\x10\x64\x12 
\n\x1c\x44\x45PRECATED_PROTO1_COMPATIBLE\x10\x32\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd1\x02\n\x0eMessageOptions\x12+\n#experimental_java_message_interface\x18\x04 \x03(\t\x12+\n#experimental_java_builder_interface\x18\x05 \x03(\t\x12+\n#experimental_java_interface_extends\x18\x06 \x03(\t\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xa0\x05\n\x0c\x46ieldOptions\x12\x31\n\x05\x63type\x18\x01 \x01(\x0e\x32\x1a.proto2.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x31\n\x05jtype\x18\x04 \x01(\x0e\x32\x1a.proto2.FieldOptions.JType:\x06NORMAL\x12\x36\n\x06jstype\x18\x06 \x01(\x0e\x32\x1b.proto2.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14\x65xperimental_map_key\x18\t \x01(\t\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12<\n\x0fupgraded_option\x18\x0b \x03(\x0b\x32#.proto2.FieldOptions.UpgradedOption\x12%\n\x16\x64\x65precated_raw_message\x18\x0c \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\x1a-\n\x0eUpgradedOption\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"<\n\x05JType\x12\n\n\x06NORMAL\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\x1c\n\x18\x45XPERIMENTAL_BYTE_BUFFER\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x99\x01\n\x0b\x45numOptions\x12\x13\n\x0bproto1_name\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"t\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xb6\x01\n\x0eServiceOptions\x12\x1d\n\x0emulticast_stub\x18\x14 \x01(\x08:\x05\x66\x61lse\x12#\n\x17\x66\x61ilure_detection_delay\x18\x10 \x01(\x01:\x02-1\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd3\t\n\rMethodOptions\x12\x35\n\x08protocol\x18\x07 \x01(\x0e\x32\x1e.proto2.MethodOptions.Protocol:\x03TCP\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12$\n\x15\x64uplicate_suppression\x18\t \x01(\x08:\x05\x66\x61lse\x12\x18\n\tfail_fast\x18\n \x01(\x08:\x05\x66\x61lse\x12\'\n\x18\x65nd_user_creds_requested\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0e\x63lient_logging\x18\x0b \x01(\x11:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x0c \x01(\x11:\x03\x32\x35\x36\x12\x41\n\x0esecurity_level\x18\r \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x43\n\x0fresponse_format\x18\x0f \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x42\n\x0erequest_format\x18\x11 \x01(\x0e\x32\x1c.proto2.MethodOptions.Format:\x0cUNCOMPRESSED\x12\x13\n\x0bstream_type\x18\x12 \x01(\t\x12\x16\n\x0esecurity_label\x18\x13 \x01(\t\x12\x18\n\x10\x63lient_streaming\x18\x14 \x01(\x08\x12\x18\n\x10server_streaming\x18\x15 \x01(\x08\x12\x1a\n\x12legacy_stream_type\x18\x16 \x01(\t\x12\x1a\n\x12legacy_result_type\x18\x17 \x01(\t\x12(\n\x1clegacy_client_initial_tokens\x18\x18 \x01(\x03:\x02-1\x12(\n\x1clegacy_server_initial_tokens\x18\x19 \x01(\x03:\x02-1\x12^\n\tlog_level\x18\x1b \x01(\x0e\x32\x1e.proto2.MethodOptions.LogLevel:+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\x1c\n\x08Protocol\x12\x07\n\x03TCP\x10\x00\x12\x07\n\x03UDP\x10\x01\"e\n\rSecurityLevel\x12\x08\n\x04NONE\x10\x00\x12\r\n\tINTEGRITY\x10\x01\x12\x19\n\x15PRIVACY_AND_INTEGRITY\x10\x02\x12 \n\x1cSTRONG_PRIVACY_AND_INTEGRITY\x10\x03\"0\n\x06\x46ormat\x12\x10\n\x0cUNCOMPRESSED\x10\x00\x12\x14\n\x10ZIPPY_COMPRESSED\x10\x01\"\x9f\x01\n\x08LogLevel\x12\x0c\n\x08LOG_NONE\x10\x00\x12\x13\n\x0fLOG_HEADER_ONLY\x10\x01\x12/\n+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x10\x02\x12#\n\x1fLOG_HEADER_AND_FILTERED_PAYLOAD\x10\x03\x12\x1a\n\x16LOG_HEADER_AND_PAYLOAD\x10\x04*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe7\x04\n\rStreamOptions\x12!\n\x15\x63lient_initial_tokens\x18\x01 \x01(\x03:\x02-1\x12!\n\x15server_initial_tokens\x18\x02 \x01(\x03:\x02-1\x12<\n\ntoken_unit\x18\x03 \x01(\x0e\x32\x1f.proto2.StreamOptions.TokenUnit:\x07MESSAGE\x12\x41\n\x0esecurity_level\x18\x04 \x01(\x0e\x32#.proto2.MethodOptions.SecurityLevel:\x04NONE\x12\x16\n\x0esecurity_label\x18\x05 \x01(\t\x12\x1b\n\x0e\x63lient_logging\x18\x06 \x01(\x05:\x03\x32\x35\x36\x12\x1b\n\x0eserver_logging\x18\x07 \x01(\x05:\x03\x32\x35\x36\x12\x14\n\x08\x64\x65\x61\x64line\x18\x08 \x01(\x01:\x02-1\x12\x18\n\tfail_fast\x18\t \x01(\x08:\x05\x66\x61lse\x12\'\n\x18\x65nd_user_creds_requested\x18\n \x01(\x08:\x05\x66\x61lse\x12^\n\tlog_level\x18\x0b \x01(\x0e\x32\x1e.proto2.MethodOptions.LogLevel:+LOG_HEADER_AND_NON_PRIVATE_PAYLOAD_INTERNAL\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12:\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32\x1b.proto2.UninterpretedOption\"\"\n\tTokenUnit\x12\x0b\n\x07MESSAGE\x10\x00\x12\x08\n\x04\x42YTE\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x95\x02\n\x13UninterpretedOption\x12\x32\n\x04name\x18\x02 \x03(\x0b\x32$.proto2.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xa8\x01\n\x0eSourceCodeInfo\x12\x31\n\x08location\x18\x01 \x03(\x0b\x32\x1f.proto2.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\tB,\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01\xe0\x01\x01')
 )
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
 
 
@@ -119,6 +123,7 @@
   serialized_start=1217,
   serialized_end=1527,
 )
+_sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE)
 
 _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor(
   name='Label',
@@ -144,6 +149,7 @@
   serialized_start=1529,
   serialized_end=1596,
 )
+_sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL)
 
 _FILEOPTIONS_COMPATIBILITYLEVEL = _descriptor.EnumDescriptor(
   name='CompatibilityLevel',
@@ -169,6 +175,7 @@
   serialized_start=3415,
   serialized_end=3514,
 )
+_sym_db.RegisterEnumDescriptor(_FILEOPTIONS_COMPATIBILITYLEVEL)
 
 _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor(
   name='OptimizeMode',
@@ -194,6 +201,7 @@
   serialized_start=3516,
   serialized_end=3574,
 )
+_sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE)
 
 _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor(
   name='CType',
@@ -219,6 +227,7 @@
   serialized_start=4425,
   serialized_end=4472,
 )
+_sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE)
 
 _FIELDOPTIONS_JTYPE = _descriptor.EnumDescriptor(
   name='JType',
@@ -244,6 +253,7 @@
   serialized_start=4474,
   serialized_end=4534,
 )
+_sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JTYPE)
 
 _FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor(
   name='JSType',
@@ -269,6 +279,7 @@
   serialized_start=4536,
   serialized_end=4589,
 )
+_sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE)
 
 _METHODOPTIONS_PROTOCOL = _descriptor.EnumDescriptor(
   name='Protocol',
@@ -290,6 +301,7 @@
   serialized_start=5943,
   serialized_end=5971,
 )
+_sym_db.RegisterEnumDescriptor(_METHODOPTIONS_PROTOCOL)
 
 _METHODOPTIONS_SECURITYLEVEL = _descriptor.EnumDescriptor(
   name='SecurityLevel',
@@ -319,6 +331,7 @@
   serialized_start=5973,
   serialized_end=6074,
 )
+_sym_db.RegisterEnumDescriptor(_METHODOPTIONS_SECURITYLEVEL)
 
 _METHODOPTIONS_FORMAT = _descriptor.EnumDescriptor(
   name='Format',
@@ -340,6 +353,7 @@
   serialized_start=6076,
   serialized_end=6124,
 )
+_sym_db.RegisterEnumDescriptor(_METHODOPTIONS_FORMAT)
 
 _METHODOPTIONS_LOGLEVEL = _descriptor.EnumDescriptor(
   name='LogLevel',
@@ -373,6 +387,7 @@
   serialized_start=6127,
   serialized_end=6286,
 )
+_sym_db.RegisterEnumDescriptor(_METHODOPTIONS_LOGLEVEL)
 
 _STREAMOPTIONS_TOKENUNIT = _descriptor.EnumDescriptor(
   name='TokenUnit',
@@ -394,6 +409,7 @@
   serialized_start=6870,
   serialized_end=6904,
 )
+_sym_db.RegisterEnumDescriptor(_STREAMOPTIONS_TOKENUNIT)
 
 
 _FILEDESCRIPTORSET = _descriptor.Descriptor(
@@ -2097,12 +2113,14 @@
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(FileDescriptorSet)
 
 FileDescriptorProto = _reflection.GeneratedProtocolMessageType('FileDescriptorProto', (_message.Message,), dict(
   DESCRIPTOR = _FILEDESCRIPTORPROTO,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(FileDescriptorProto)
 
 DescriptorProto = _reflection.GeneratedProtocolMessageType('DescriptorProto', (_message.Message,), dict(
 
@@ -2116,60 +2134,71 @@
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(DescriptorProto)
+_sym_db.RegisterMessage(DescriptorProto.ExtensionRange)
 
 FieldDescriptorProto = _reflection.GeneratedProtocolMessageType('FieldDescriptorProto', (_message.Message,), dict(
   DESCRIPTOR = _FIELDDESCRIPTORPROTO,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(FieldDescriptorProto)
 
 OneofDescriptorProto = _reflection.GeneratedProtocolMessageType('OneofDescriptorProto', (_message.Message,), dict(
   DESCRIPTOR = _ONEOFDESCRIPTORPROTO,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(OneofDescriptorProto)
 
 EnumDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumDescriptorProto', (_message.Message,), dict(
   DESCRIPTOR = _ENUMDESCRIPTORPROTO,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(EnumDescriptorProto)
 
 EnumValueDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumValueDescriptorProto', (_message.Message,), dict(
   DESCRIPTOR = _ENUMVALUEDESCRIPTORPROTO,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(EnumValueDescriptorProto)
 
 ServiceDescriptorProto = _reflection.GeneratedProtocolMessageType('ServiceDescriptorProto', (_message.Message,), dict(
   DESCRIPTOR = _SERVICEDESCRIPTORPROTO,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(ServiceDescriptorProto)
 
 MethodDescriptorProto = _reflection.GeneratedProtocolMessageType('MethodDescriptorProto', (_message.Message,), dict(
   DESCRIPTOR = _METHODDESCRIPTORPROTO,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(MethodDescriptorProto)
 
 StreamDescriptorProto = _reflection.GeneratedProtocolMessageType('StreamDescriptorProto', (_message.Message,), dict(
   DESCRIPTOR = _STREAMDESCRIPTORPROTO,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(StreamDescriptorProto)
 
 FileOptions = _reflection.GeneratedProtocolMessageType('FileOptions', (_message.Message,), dict(
   DESCRIPTOR = _FILEOPTIONS,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(FileOptions)
 
 MessageOptions = _reflection.GeneratedProtocolMessageType('MessageOptions', (_message.Message,), dict(
   DESCRIPTOR = _MESSAGEOPTIONS,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(MessageOptions)
 
 FieldOptions = _reflection.GeneratedProtocolMessageType('FieldOptions', (_message.Message,), dict(
 
@@ -2183,36 +2212,43 @@
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(FieldOptions)
+_sym_db.RegisterMessage(FieldOptions.UpgradedOption)
 
 EnumOptions = _reflection.GeneratedProtocolMessageType('EnumOptions', (_message.Message,), dict(
   DESCRIPTOR = _ENUMOPTIONS,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(EnumOptions)
 
 EnumValueOptions = _reflection.GeneratedProtocolMessageType('EnumValueOptions', (_message.Message,), dict(
   DESCRIPTOR = _ENUMVALUEOPTIONS,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(EnumValueOptions)
 
 ServiceOptions = _reflection.GeneratedProtocolMessageType('ServiceOptions', (_message.Message,), dict(
   DESCRIPTOR = _SERVICEOPTIONS,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(ServiceOptions)
 
 MethodOptions = _reflection.GeneratedProtocolMessageType('MethodOptions', (_message.Message,), dict(
   DESCRIPTOR = _METHODOPTIONS,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(MethodOptions)
 
 StreamOptions = _reflection.GeneratedProtocolMessageType('StreamOptions', (_message.Message,), dict(
   DESCRIPTOR = _STREAMOPTIONS,
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(StreamOptions)
 
 UninterpretedOption = _reflection.GeneratedProtocolMessageType('UninterpretedOption', (_message.Message,), dict(
 
@@ -2226,6 +2262,8 @@
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(UninterpretedOption)
+_sym_db.RegisterMessage(UninterpretedOption.NamePart)
 
 SourceCodeInfo = _reflection.GeneratedProtocolMessageType('SourceCodeInfo', (_message.Message,), dict(
 
@@ -2239,6 +2277,8 @@
   __module__ = 'google.net.proto2.proto.descriptor_pb2'
 
   ))
+_sym_db.RegisterMessage(SourceCodeInfo)
+_sym_db.RegisterMessage(SourceCodeInfo.Location)
 
 
 
diff --git a/google/net/proto2/python/internal/api_implementation.py b/google/net/proto2/python/internal/api_implementation.py
index 2dc3f5b..e8f4ce9 100644
--- a/google/net/proto2/python/internal/api_implementation.py
+++ b/google/net/proto2/python/internal/api_implementation.py
@@ -16,15 +16,11 @@
 #
 
 
-
-
+"""Determine which implementation of the protobuf API is used in this process.
 """
-This module is the central entity that determines which implementation of the
-API is used.
-"""
-
 
 import os
+import sys
 
 try:
 
@@ -32,6 +28,7 @@
 
 
   _api_version = _api_implementation.api_version
+  del _api_implementation
 except ImportError:
   _api_version = 0
 
@@ -47,36 +44,23 @@
 _implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
                                  _default_implementation_type)
 
-
 if _implementation_type != 'python':
-
-
-
   _implementation_type = 'cpp'
 
 
 
 
 
-
-
-
-
-
-
-
 _implementation_version_str = os.getenv(
     'PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION',
     _default_version_str)
 
-
 if _implementation_version_str not in ('1', '2'):
   raise ValueError(
       "unsupported PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION: '" +
       _implementation_version_str + "' (supported versions: 1, 2)"
       )
 
-
 _implementation_version = int(_implementation_version_str)
 
 
@@ -88,5 +72,6 @@
   return _implementation_type
 
 
+
 def Version():
   return _implementation_version
diff --git a/google/net/proto2/python/public/basic_descriptor_pool.py b/google/net/proto2/python/public/basic_descriptor_pool.py
index 971828f..d52a717 100644
--- a/google/net/proto2/python/public/basic_descriptor_pool.py
+++ b/google/net/proto2/python/public/basic_descriptor_pool.py
@@ -14,135 +14,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-"""A minimal DescriptorPool implementation.
-
-BasicDescriptorPool is a DescriptorPool with no underlying DescriptorDatabase.
-This makes it suitable for use with SymbolDatabase, since the messages
-registered there are not generated via a DescriptorDatabase.
-"""
-
-
-def _NormalizeFullyQualifiedName(name):
-  """Remove leading period from fully-qualified type name.
-
-  Sometimes the proto generator prepends a period (.) in front of fully
-  qualified packages, but this isn't consistent and varies depending on if you
-  are using a pre-compiled file descriptor proto, one from the proto file
-  parser, or one from other dynamic sources. This function normalizes these
-  names by removing the leading period.
-
-  Args:
-    name: A str, the fully-qualified symbol name.
-
-  Returns:
-    A str, the normalized fully-qualified symbol name.
-  """
-  return name.lstrip('.')
-
-
-class BasicDescriptorPool(object):
-  """A pool of related Descriptor, EnumDescriptor and FileDescriptors."""
-
-  def __init__(self):
-    """Initializes a Pool of proto buffs."""
-    self._descriptors = {}
-    self._enum_descriptors = {}
-    self._file_descriptors = {}
-
-  def AddMessage(self, desc):
-    """Adds a Descriptor to the pool, non-recursively.
-
-    If the Descriptor contains nested messages or enums, the caller must
-    explicitly register them. This method also registers the FileDescriptor
-    associated with the message.
-
-    Args:
-      desc: A Descriptor.
-    """
-
-    self._descriptors[desc.full_name] = desc
-    self.AddFile(desc.file)
-
-  def AddEnum(self, enum_desc):
-    """Adds an EnumDescriptor to the pool.
-
-    This method also registers the FileDescriptor associated with the message.
-
-    Args:
-      enum_desc: An EnumDescriptor.
-    """
-
-    self._enum_descriptors[enum_desc.full_name] = enum_desc
-    self.AddFile(enum_desc.file)
-
-  def AddFile(self, file_desc):
-    """Adds a FileDescriptor to the pool, non-recursively.
-
-    If the FileDescriptor contains messages or enums, the caller must explicitly
-    register them.
-
-    Args:
-      file_desc: A FileDescriptor.
-    """
-
-    self._file_descriptors[file_desc.name] = file_desc
-
-  def FindFileByName(self, file_name):
-    """Gets a FileDescriptor by file name.
-
-    Args:
-      file_name: The path to the file to get a descriptor for.
-
-    Returns:
-      A FileDescriptor for the named file.
-
-    Raises:
-      KeyError: if the file can not be found in the pool.
-    """
-
-    return self._file_descriptors[file_name]
-
-  def FindFileContainingSymbol(self, symbol):
-    """Gets the FileDescriptor for the file containing the specified symbol.
-
-    Args:
-      symbol: The name of the symbol to search for.
-
-    Returns:
-      A FileDescriptor that contains the specified symbol.
-
-    Raises:
-      KeyError: if the file can not be found in the pool.
-    """
-
-    symbol = _NormalizeFullyQualifiedName(symbol)
-    try:
-      return self._descriptors[symbol].file
-    except KeyError:
-      return self._enum_descriptors[symbol].file
-
-  def FindMessageTypeByName(self, full_name):
-    """Loads the named descriptor from the pool.
-
-    Args:
-      full_name: The full name of the descriptor to load.
-
-    Returns:
-      The descriptor for the named type.
-    """
-
-    full_name = _NormalizeFullyQualifiedName(full_name)
-    return self._descriptors[full_name]
-
-  def FindEnumTypeByName(self, full_name):
-    """Loads the named enum descriptor from the pool.
-
-    Args:
-      full_name: The full name of the enum descriptor to load.
-
-    Returns:
-      The enum descriptor for the named type.
-    """
-
-    full_name = _NormalizeFullyQualifiedName(full_name)
-    return self._enum_descriptors[full_name]
+"""To be removed once all references to this file have been removed."""
diff --git a/google/net/proto2/python/public/descriptor_database.py b/google/net/proto2/python/public/descriptor_database.py
new file mode 100644
index 0000000..702a48d
--- /dev/null
+++ b/google/net/proto2/python/public/descriptor_database.py
@@ -0,0 +1,124 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+"""Provides a container for DescriptorProtos."""
+
+
+
+class Error(Exception):
+  pass
+
+
+class DescriptorDatabaseConflictingDefinitionError(Error):
+  """Raised when a proto is added with the same name & different descriptor."""
+
+
+class DescriptorDatabase(object):
+  """A container accepting FileDescriptorProtos and maps DescriptorProtos."""
+
+  def __init__(self):
+    self._file_desc_protos_by_file = {}
+    self._file_desc_protos_by_symbol = {}
+
+  def Add(self, file_desc_proto):
+    """Adds the FileDescriptorProto and its types to this database.
+
+    Args:
+      file_desc_proto: The FileDescriptorProto to add.
+    Raises:
+      DescriptorDatabaseException: if an attempt is made to add a proto
+        with the same name but different definition than an exisiting
+        proto in the database.
+    """
+    proto_name = file_desc_proto.name
+    if proto_name not in self._file_desc_protos_by_file:
+      self._file_desc_protos_by_file[proto_name] = file_desc_proto
+    elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
+      raise DescriptorDatabaseConflictingDefinitionError(
+          '%s already added, but with different descriptor.' % proto_name)
+
+    package = file_desc_proto.package
+    for message in file_desc_proto.message_type:
+      self._file_desc_protos_by_symbol.update(
+          (name, file_desc_proto) for name in _ExtractSymbols(message, package))
+    for enum in file_desc_proto.enum_type:
+      self._file_desc_protos_by_symbol[
+          '.'.join((package, enum.name))] = file_desc_proto
+
+  def FindFileByName(self, name):
+    """Finds the file descriptor proto by file name.
+
+    Typically the file name is a relative path ending to a .proto file. The
+    proto with the given name will have to have been added to this database
+    using the Add method or else an error will be raised.
+
+    Args:
+      name: The file name to find.
+
+    Returns:
+      The file descriptor proto matching the name.
+
+    Raises:
+      KeyError if no file by the given name was added.
+    """
+
+    return self._file_desc_protos_by_file[name]
+
+  def FindFileContainingSymbol(self, symbol):
+    """Finds the file descriptor proto containing the specified symbol.
+
+    The symbol should be a fully qualified name including the file descriptor's
+    package and any containing messages. Some examples:
+
+    'some.package.name.Message'
+    'some.package.name.Message.NestedEnum'
+
+    The file descriptor proto containing the specified symbol must be added to
+    this database using the Add method or else an error will be raised.
+
+    Args:
+      symbol: The fully qualified symbol name.
+
+    Returns:
+      The file descriptor proto containing the symbol.
+
+    Raises:
+      KeyError if no file contains the specified symbol.
+    """
+
+    return self._file_desc_protos_by_symbol[symbol]
+
+
+def _ExtractSymbols(desc_proto, package):
+  """Pulls out all the symbols from a descriptor proto.
+
+  Args:
+    desc_proto: The proto to extract symbols from.
+    package: The package containing the descriptor type.
+
+  Yields:
+    The fully qualified name found in the descriptor.
+  """
+
+  message_name = '.'.join((package, desc_proto.name))
+  yield message_name
+  for nested_type in desc_proto.nested_type:
+    for symbol in _ExtractSymbols(nested_type, message_name):
+      yield symbol
+    for enum_type in desc_proto.enum_type:
+      yield '.'.join((message_name, enum_type.name))
diff --git a/google/net/proto2/python/public/descriptor_pool.py b/google/net/proto2/python/public/descriptor_pool.py
new file mode 100644
index 0000000..78e0962
--- /dev/null
+++ b/google/net/proto2/python/public/descriptor_pool.py
@@ -0,0 +1,619 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+"""Provides DescriptorPool to use as a container for proto2 descriptors.
+
+The DescriptorPool is used in conjection with a DescriptorDatabase to maintain
+a collection of protocol buffer descriptors for use when dynamically creating
+message types at runtime.
+
+For most applications protocol buffers should be used via modules generated by
+the protocol buffer compiler tool. This should only be used when the type of
+protocol buffers used in an application or library cannot be predetermined.
+
+Below is a straightforward example on how to use this class:
+
+  pool = DescriptorPool()
+  file_descriptor_protos = [ ... ]
+  for file_descriptor_proto in file_descriptor_protos:
+    pool.Add(file_descriptor_proto)
+  my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
+
+The message descriptor can be used in conjunction with the message_factory
+module in order to create a protocol buffer class that can be encoded and
+decoded.
+
+If you want to get a Python class for the specified proto, use the
+helper functions inside google.net.proto2.python.public.message_factory
+directly instead of this class.
+"""
+
+
+import sys
+
+from google.net.proto2.python.public import descriptor
+from google.net.proto2.python.public import descriptor_database
+from google.net.proto2.python.public import text_encoding
+
+
+def _NormalizeFullyQualifiedName(name):
+  """Remove leading period from fully-qualified type name.
+
+  Due to b/13860351 in descriptor_database.py, types in the root namespace are
+  generated with a leading period. This function removes that prefix.
+
+  Args:
+    name: A str, the fully-qualified symbol name.
+
+  Returns:
+    A str, the normalized fully-qualified symbol name.
+  """
+  return name.lstrip('.')
+
+
+class DescriptorPool(object):
+  """A collection of protobufs dynamically constructed by descriptor protos."""
+
+  def __init__(self, descriptor_db=None):
+    """Initializes a Pool of proto buffs.
+
+    The descriptor_db argument to the constructor is provided to allow
+    specialized file descriptor proto lookup code to be triggered on demand. An
+    example would be an implementation which will read and compile a file
+    specified in a call to FindFileByName() and not require the call to Add()
+    at all. Results from this database will be cached internally here as well.
+
+    Args:
+      descriptor_db: A secondary source of file descriptors.
+    """
+
+    self._internal_db = descriptor_database.DescriptorDatabase()
+    self._descriptor_db = descriptor_db
+    self._descriptors = {}
+    self._enum_descriptors = {}
+    self._file_descriptors = {}
+
+  def Add(self, file_desc_proto):
+    """Adds the FileDescriptorProto and its types to this pool.
+
+    Args:
+      file_desc_proto: The FileDescriptorProto to add.
+    """
+
+    self._internal_db.Add(file_desc_proto)
+
+  def AddDescriptor(self, desc):
+    """Adds a Descriptor to the pool, non-recursively.
+
+    If the Descriptor contains nested messages or enums, the caller must
+    explicitly register them. This method also registers the FileDescriptor
+    associated with the message.
+
+    Args:
+      desc: A Descriptor.
+    """
+    if not isinstance(desc, descriptor.Descriptor):
+      raise TypeError('Expected instance of descriptor.Descriptor.')
+
+    self._descriptors[desc.full_name] = desc
+    self.AddFileDescriptor(desc.file)
+
+  def AddEnumDescriptor(self, enum_desc):
+    """Adds an EnumDescriptor to the pool.
+
+    This method also registers the FileDescriptor associated with the message.
+
+    Args:
+      enum_desc: An EnumDescriptor.
+    """
+
+    if not isinstance(enum_desc, descriptor.EnumDescriptor):
+      raise TypeError('Expected instance of descriptor.EnumDescriptor.')
+
+    self._enum_descriptors[enum_desc.full_name] = enum_desc
+    self.AddFileDescriptor(enum_desc.file)
+
+  def AddFileDescriptor(self, file_desc):
+    """Adds a FileDescriptor to the pool, non-recursively.
+
+    If the FileDescriptor contains messages or enums, the caller must explicitly
+    register them.
+
+    Args:
+      file_desc: A FileDescriptor.
+    """
+
+    if not isinstance(file_desc, descriptor.FileDescriptor):
+      raise TypeError('Expected instance of descriptor.FileDescriptor.')
+    self._file_descriptors[file_desc.name] = file_desc
+
+  def FindFileByName(self, file_name):
+    """Gets a FileDescriptor by file name.
+
+    Args:
+      file_name: The path to the file to get a descriptor for.
+
+    Returns:
+      A FileDescriptor for the named file.
+
+    Raises:
+      KeyError: if the file can not be found in the pool.
+    """
+
+    try:
+      return self._file_descriptors[file_name]
+    except KeyError:
+      pass
+
+    try:
+      file_proto = self._internal_db.FindFileByName(file_name)
+    except KeyError:
+      _, error, _ = sys.exc_info()
+      if self._descriptor_db:
+        file_proto = self._descriptor_db.FindFileByName(file_name)
+      else:
+        raise error
+    if not file_proto:
+      raise KeyError('Cannot find a file named %s' % file_name)
+    return self._ConvertFileProtoToFileDescriptor(file_proto)
+
+  def FindFileContainingSymbol(self, symbol):
+    """Gets the FileDescriptor for the file containing the specified symbol.
+
+    Args:
+      symbol: The name of the symbol to search for.
+
+    Returns:
+      A FileDescriptor that contains the specified symbol.
+
+    Raises:
+      KeyError: if the file can not be found in the pool.
+    """
+
+    symbol = _NormalizeFullyQualifiedName(symbol)
+    try:
+      return self._descriptors[symbol].file
+    except KeyError:
+      pass
+
+    try:
+      return self._enum_descriptors[symbol].file
+    except KeyError:
+      pass
+
+    try:
+      file_proto = self._internal_db.FindFileContainingSymbol(symbol)
+    except KeyError:
+      _, error, _ = sys.exc_info()
+      if self._descriptor_db:
+        file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
+      else:
+        raise error
+    if not file_proto:
+      raise KeyError('Cannot find a file containing %s' % symbol)
+    return self._ConvertFileProtoToFileDescriptor(file_proto)
+
+  def FindMessageTypeByName(self, full_name):
+    """Loads the named descriptor from the pool.
+
+    Args:
+      full_name: The full name of the descriptor to load.
+
+    Returns:
+      The descriptor for the named type.
+    """
+
+    full_name = _NormalizeFullyQualifiedName(full_name)
+    if full_name not in self._descriptors:
+      self.FindFileContainingSymbol(full_name)
+    return self._descriptors[full_name]
+
+  def FindEnumTypeByName(self, full_name):
+    """Loads the named enum descriptor from the pool.
+
+    Args:
+      full_name: The full name of the enum descriptor to load.
+
+    Returns:
+      The enum descriptor for the named type.
+    """
+
+    full_name = _NormalizeFullyQualifiedName(full_name)
+    if full_name not in self._enum_descriptors:
+      self.FindFileContainingSymbol(full_name)
+    return self._enum_descriptors[full_name]
+
+  def _ConvertFileProtoToFileDescriptor(self, file_proto):
+    """Creates a FileDescriptor from a proto or returns a cached copy.
+
+    This method also has the side effect of loading all the symbols found in
+    the file into the appropriate dictionaries in the pool.
+
+    Args:
+      file_proto: The proto to convert.
+
+    Returns:
+      A FileDescriptor matching the passed in proto.
+    """
+
+    if file_proto.name not in self._file_descriptors:
+      built_deps = list(self._GetDeps(file_proto.dependency))
+      direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]
+
+      file_descriptor = descriptor.FileDescriptor(
+          name=file_proto.name,
+          package=file_proto.package,
+          options=file_proto.options,
+          serialized_pb=file_proto.SerializeToString(),
+          dependencies=direct_deps)
+      scope = {}
+
+
+
+
+
+      for dependency in built_deps:
+        scope.update(self._ExtractSymbols(
+            dependency.message_types_by_name.values()))
+        scope.update((_PrefixWithDot(enum.full_name), enum)
+                     for enum in dependency.enum_types_by_name.values())
+
+      for message_type in file_proto.message_type:
+        message_desc = self._ConvertMessageDescriptor(
+            message_type, file_proto.package, file_descriptor, scope)
+        file_descriptor.message_types_by_name[message_desc.name] = message_desc
+
+      for enum_type in file_proto.enum_type:
+        file_descriptor.enum_types_by_name[enum_type.name] = (
+            self._ConvertEnumDescriptor(enum_type, file_proto.package,
+                                        file_descriptor, None, scope))
+
+      for index, extension_proto in enumerate(file_proto.extension):
+        extension_desc = self.MakeFieldDescriptor(
+            extension_proto, file_proto.package, index, is_extension=True)
+        extension_desc.containing_type = self._GetTypeFromScope(
+            file_descriptor.package, extension_proto.extendee, scope)
+        self.SetFieldType(extension_proto, extension_desc,
+                          file_descriptor.package, scope)
+        file_descriptor.extensions_by_name[extension_desc.name] = extension_desc
+
+      for desc_proto in file_proto.message_type:
+        self.SetAllFieldTypes(file_proto.package, desc_proto, scope)
+
+      if file_proto.package:
+        desc_proto_prefix = _PrefixWithDot(file_proto.package)
+      else:
+        desc_proto_prefix = ''
+
+      for desc_proto in file_proto.message_type:
+        desc = self._GetTypeFromScope(desc_proto_prefix, desc_proto.name, scope)
+        file_descriptor.message_types_by_name[desc_proto.name] = desc
+      self.Add(file_proto)
+      self._file_descriptors[file_proto.name] = file_descriptor
+
+    return self._file_descriptors[file_proto.name]
+
+  def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
+                                scope=None):
+    """Adds the proto to the pool in the specified package.
+
+    Args:
+      desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
+      package: The package the proto should be located in.
+      file_desc: The file containing this message.
+      scope: Dict mapping short and full symbols to message and enum types.
+
+    Returns:
+      The added descriptor.
+    """
+
+    if package:
+      desc_name = '.'.join((package, desc_proto.name))
+    else:
+      desc_name = desc_proto.name
+
+    if file_desc is None:
+      file_name = None
+    else:
+      file_name = file_desc.name
+
+    if scope is None:
+      scope = {}
+
+    nested = [
+        self._ConvertMessageDescriptor(nested, desc_name, file_desc, scope)
+        for nested in desc_proto.nested_type]
+    enums = [
+        self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, scope)
+        for enum in desc_proto.enum_type]
+    fields = [self.MakeFieldDescriptor(field, desc_name, index)
+              for index, field in enumerate(desc_proto.field)]
+    extensions = [
+        self.MakeFieldDescriptor(extension, desc_name, index, is_extension=True)
+        for index, extension in enumerate(desc_proto.extension)]
+    extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
+    if extension_ranges:
+      is_extendable = True
+    else:
+      is_extendable = False
+    desc = descriptor.Descriptor(
+        name=desc_proto.name,
+        full_name=desc_name,
+        filename=file_name,
+        containing_type=None,
+        fields=fields,
+        nested_types=nested,
+        enum_types=enums,
+        extensions=extensions,
+        options=desc_proto.options,
+        is_extendable=is_extendable,
+        extension_ranges=extension_ranges,
+        file=file_desc,
+        serialized_start=None,
+        serialized_end=None)
+    for nested in desc.nested_types:
+      nested.containing_type = desc
+    for enum in desc.enum_types:
+      enum.containing_type = desc
+    scope[_PrefixWithDot(desc_name)] = desc
+    self._descriptors[desc_name] = desc
+    return desc
+
+  def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
+                             containing_type=None, scope=None):
+    """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.
+
+    Args:
+      enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
+      package: Optional package name for the new message EnumDescriptor.
+      file_desc: The file containing the enum descriptor.
+      containing_type: The type containing this enum.
+      scope: Scope containing available types.
+
+    Returns:
+      The added descriptor
+    """
+
+    if package:
+      enum_name = '.'.join((package, enum_proto.name))
+    else:
+      enum_name = enum_proto.name
+
+    if file_desc is None:
+      file_name = None
+    else:
+      file_name = file_desc.name
+
+    values = [self._MakeEnumValueDescriptor(value, index)
+              for index, value in enumerate(enum_proto.value)]
+    desc = descriptor.EnumDescriptor(name=enum_proto.name,
+                                     full_name=enum_name,
+                                     filename=file_name,
+                                     file=file_desc,
+                                     values=values,
+                                     containing_type=containing_type,
+                                     options=enum_proto.options)
+    scope['.%s' % enum_name] = desc
+    self._enum_descriptors[enum_name] = desc
+    return desc
+
+  def MakeFieldDescriptor(self, field_proto, message_name, index,
+                          is_extension=False):
+    """Creates a field descriptor from a FieldDescriptorProto.
+
+    For message and enum type fields, this method will do a look up
+    in the pool for the appropriate descriptor for that type. If it
+    is unavailable, it will fall back to the _source function to
+    create it. If this type is still unavailable, construction will
+    fail.
+
+    Args:
+      field_proto: The proto describing the field.
+      message_name: The name of the containing message.
+      index: Index of the field
+      is_extension: Indication that this field is for an extension.
+
+    Returns:
+      An initialized FieldDescriptor object
+    """
+
+    if message_name:
+      full_name = '.'.join((message_name, field_proto.name))
+    else:
+      full_name = field_proto.name
+
+    return descriptor.FieldDescriptor(
+        name=field_proto.name,
+        full_name=full_name,
+        index=index,
+        number=field_proto.number,
+        type=field_proto.type,
+        cpp_type=None,
+        message_type=None,
+        enum_type=None,
+        containing_type=None,
+        label=field_proto.label,
+        has_default_value=False,
+        default_value=None,
+        is_extension=is_extension,
+        extension_scope=None,
+        options=field_proto.options)
+
+  def SetAllFieldTypes(self, package, desc_proto, scope):
+    """Sets all the descriptor's fields' types.
+
+    This method also sets the containing types on any extensions.
+
+    Args:
+      package: The current package of desc_proto.
+      desc_proto: The message descriptor to update.
+      scope: Enclosing scope of available types.
+    """
+
+    package = _PrefixWithDot(package)
+
+    main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)
+
+    if package == '.':
+      nested_package = _PrefixWithDot(desc_proto.name)
+    else:
+      nested_package = '.'.join([package, desc_proto.name])
+
+    for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
+      self.SetFieldType(field_proto, field_desc, nested_package, scope)
+
+    for extension_proto, extension_desc in (
+        zip(desc_proto.extension, main_desc.extensions)):
+      extension_desc.containing_type = self._GetTypeFromScope(
+          nested_package, extension_proto.extendee, scope)
+      self.SetFieldType(extension_proto, extension_desc, nested_package, scope)
+
+    for nested_type in desc_proto.nested_type:
+      self.SetAllFieldTypes(nested_package, nested_type, scope)
+
+  def SetFieldType(self, field_proto, field_desc, package, scope):
+    """Sets the field's type, cpp_type, message_type and enum_type.
+
+    Args:
+      field_proto: Data about the field in proto format.
+      field_desc: The descriptor to modify.
+      package: The package the field's container is in.
+      scope: Enclosing scope of available types.
+    """
+    if field_proto.type_name:
+      desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
+    else:
+      desc = None
+
+    if not field_proto.HasField('type'):
+      if isinstance(desc, descriptor.Descriptor):
+        field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
+      else:
+        field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM
+
+    field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
+        field_proto.type)
+
+    if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
+        or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
+      field_desc.message_type = desc
+
+    if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
+      field_desc.enum_type = desc
+
+    if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+      field_desc.has_default_value = False
+      field_desc.default_value = []
+    elif field_proto.HasField('default_value'):
+      field_desc.has_default_value = True
+      if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
+          field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
+        field_desc.default_value = float(field_proto.default_value)
+      elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
+        field_desc.default_value = field_proto.default_value
+      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
+        field_desc.default_value = field_proto.default_value.lower() == 'true'
+      elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
+        field_desc.default_value = field_desc.enum_type.values_by_name[
+            field_proto.default_value].index
+      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
+        field_desc.default_value = text_encoding.CUnescape(
+            field_proto.default_value)
+      else:
+        field_desc.default_value = int(field_proto.default_value)
+    else:
+      field_desc.has_default_value = False
+      field_desc.default_value = None
+
+    field_desc.type = field_proto.type
+
+  def _MakeEnumValueDescriptor(self, value_proto, index):
+    """Creates an enum value descriptor object from an enum value proto.
+
+    Args:
+      value_proto: The proto describing the enum value.
+      index: The index of the enum value.
+
+    Returns:
+      An initialized EnumValueDescriptor object.
+    """
+
+    return descriptor.EnumValueDescriptor(
+        name=value_proto.name,
+        index=index,
+        number=value_proto.number,
+        options=value_proto.options,
+        type=None)
+
+  def _ExtractSymbols(self, descriptors):
+    """Pulls out all the symbols from descriptor protos.
+
+    Args:
+      descriptors: The messages to extract descriptors from.
+    Yields:
+      A two element tuple of the type name and descriptor object.
+    """
+
+    for desc in descriptors:
+      yield (_PrefixWithDot(desc.full_name), desc)
+      for symbol in self._ExtractSymbols(desc.nested_types):
+        yield symbol
+      for enum in desc.enum_types:
+        yield (_PrefixWithDot(enum.full_name), enum)
+
+  def _GetDeps(self, dependencies):
+    """Recursively finds dependencies for file protos.
+
+    Args:
+      dependencies: The names of the files being depended on.
+
+    Yields:
+      Each direct and indirect dependency.
+    """
+
+    for dependency in dependencies:
+      dep_desc = self.FindFileByName(dependency)
+      yield dep_desc
+      for parent_dep in dep_desc.dependencies:
+        yield parent_dep
+
+  def _GetTypeFromScope(self, package, type_name, scope):
+    """Finds a given type name in the current scope.
+
+    Args:
+      package: The package the proto should be located in.
+      type_name: The name of the type to be found in the scope.
+      scope: Dict mapping short and full symbols to message and enum types.
+
+    Returns:
+      The descriptor for the requested type.
+    """
+    if type_name not in scope:
+      components = _PrefixWithDot(package).split('.')
+      while components:
+        possible_match = '.'.join(components + [type_name])
+        if possible_match in scope:
+          type_name = possible_match
+          break
+        else:
+          components.pop(-1)
+    return scope[type_name]
+
+
+def _PrefixWithDot(name):
+  return name if name.startswith('.') else '.%s' % name
diff --git a/google/net/proto2/python/public/symbol_database.py b/google/net/proto2/python/public/symbol_database.py
index 6de0afa..95ca092 100644
--- a/google/net/proto2/python/public/symbol_database.py
+++ b/google/net/proto2/python/public/symbol_database.py
@@ -19,7 +19,7 @@
 SymbolDatabase makes it easy to create new instances of a registered type, given
 only the type's protocol buffer symbol name. Once all symbols are registered,
 they can be accessed using either the MessageFactory interface which
-SymbolDatabase exposes, or the BasicDescriptorPool interface of the underlying
+SymbolDatabase exposes, or the DescriptorPool interface of the underlying
 pool.
 
 Example usage:
@@ -46,7 +46,7 @@
 """
 
 
-from google.net.proto2.python.public import basic_descriptor_pool
+from google.net.proto2.python.public import descriptor_pool
 
 
 class SymbolDatabase(object):
@@ -63,7 +63,7 @@
 
     self._symbols = {}
     self._symbols_by_file = {}
-    self.pool = basic_descriptor_pool.BasicDescriptorPool()
+    self.pool = descriptor_pool.DescriptorPool()
 
   def RegisterMessage(self, message):
     """Registers the given message type in the local database.
@@ -80,7 +80,7 @@
     if desc.file.name not in self._symbols_by_file:
       self._symbols_by_file[desc.file.name] = {}
     self._symbols_by_file[desc.file.name][desc.full_name] = message
-    self.pool.AddMessage(desc)
+    self.pool.AddDescriptor(desc)
     return message
 
   def RegisterEnumDescriptor(self, enum_descriptor):
@@ -92,7 +92,7 @@
     Returns:
       The provided descriptor.
     """
-    self.pool.AddEnum(enum_descriptor)
+    self.pool.AddEnumDescriptor(enum_descriptor)
     return enum_descriptor
 
   def RegisterFileDescriptor(self, file_descriptor):
@@ -104,7 +104,7 @@
     Returns:
       The provided descriptor.
     """
-    self.pool.AddFile(file_descriptor)
+    self.pool.AddFileDescriptor(file_descriptor)
 
   def GetSymbol(self, symbol):
     """Tries to find a symbol in the local database.
diff --git a/google/net/proto2/python/public/text_format.py b/google/net/proto2/python/public/text_format.py
index d199c22..734fe29 100644
--- a/google/net/proto2/python/public/text_format.py
+++ b/google/net/proto2/python/public/text_format.py
@@ -40,7 +40,11 @@
 _FLOAT_NAN = re.compile('nanf?', re.IGNORECASE)
 
 
-class ParseError(Exception):
+class Error(Exception):
+  """Top-level module error for text_format."""
+
+
+class ParseError(Error):
   """Thrown in case of ASCII parsing error."""
 
 
diff --git a/google/storage/speckle/proto/client_error_code_pb2.py b/google/storage/speckle/proto/client_error_code_pb2.py
index ab2a5bf..6dc532d 100644
--- a/google/storage/speckle/proto/client_error_code_pb2.py
+++ b/google/storage/speckle/proto/client_error_code_pb2.py
@@ -22,9 +22,12 @@
 from google.net.proto2.python.public import descriptor as _descriptor
 from google.net.proto2.python.public import message as _message
 from google.net.proto2.python.public import reflection as _reflection
+from google.net.proto2.python.public import symbol_database as _symbol_database
 from google.net.proto2.proto import descriptor_pb2
 
 
+_sym_db = _symbol_database.Default()
+
 
 
 
@@ -33,6 +36,7 @@
   package='speckle.sql',
   serialized_pb=_b('\n-storage/speckle/proto/client_error_code.proto\x12\x0bspeckle.sql\"\xb3\x08\n\x15SqlServiceClientError\"\x99\x08\n\x0f\x43lientErrorCode\x12\x06\n\x02OK\x10\x00\x12\x13\n\x0fTRANSIENT_ERROR\x10\x01\x12\x12\n\x0eINTERNAL_ERROR\x10\x02\x12\x13\n\x0fINVALID_REQUEST\x10\x03\x12\x16\n\x12\x44\x45PRECATED_TIMEOUT\x10\x04\x12\x1d\n\x19\x44\x45PRECATED_NOT_AUTHORIZED\x10\x05\x12\x1a\n\x16\x44\x45PRECATED_RDBMS_ERROR\x10\x06\x12\"\n\x1d\x45RROR_PUBLIC_ERROR_CODE_START\x10\xe8\x07\x12\x10\n\x0b\x45RROR_RDBMS\x10\xe9\x07\x12\x12\n\rERROR_TIMEOUT\x10\xea\x07\x12\x19\n\x14\x45RROR_NOT_AUTHORIZED\x10\xeb\x07\x12\x1d\n\x18\x45RROR_INSTANCE_SUSPENDED\x10\xec\x07\x12\x1c\n\x17\x45RROR_INVALID_PARAMETER\x10\xed\x07\x12\"\n\x1d\x45RROR_NOT_ALL_VARIABLES_BOUND\x10\xee\x07\x12\x1d\n\x18\x45RROR_UNKNOWN_CONNECTION\x10\xef\x07\x12\x1c\n\x17\x45RROR_UNKNOWN_STATEMENT\x10\xf0\x07\x12\x1a\n\x15\x45RROR_UNKNOWN_CATALOG\x10\xf1\x07\x12\x19\n\x14\x45RROR_UNKNOWN_CURSOR\x10\xf2\x07\x12\x1b\n\x16\x45RROR_CURSOR_EXHAUSTED\x10\xfc\x07\x12\x1e\n\x19\x45RROR_NOT_YET_IMPLEMENTED\x10\x86\x08\x12\x1a\n\x15\x45RROR_NOT_IMPLEMENTED\x10\x87\x08\x12\x1f\n\x1a\x45RROR_INSTANCE_MAINTENANCE\x10\x88\x08\x12\'\n\"ERROR_TOO_MANY_CONCURRENT_REQUESTS\x10\x89\x08\x12\"\n\x1d\x45RROR_RESOURCE_DOES_NOT_EXIST\x10\x8a\x08\x12\"\n\x1d\x45RROR_RESOURCE_ALREADY_EXISTS\x10\x8b\x08\x12\x1c\n\x17\x45RROR_CONNECTION_IN_USE\x10\x8c\x08\x12!\n\x1c\x45RROR_CLIENT_VERSION_TOO_OLD\x10\x8d\x08\x12\x1b\n\x16\x45RROR_RESPONSE_PENDING\x10\x8e\x08\x12(\n#ERROR_INSTANCE_SUSPENDED_BY_BILLING\x10\x8f\x08\x12\x1e\n\x19\x45RROR_RESULTSET_TOO_LARGE\x10\x90\x08\x12)\n$ERROR_ACTIVATION_POLICY_SET_TO_NEVER\x10\x91\x08\x12&\n!ERROR_INSTANCE_SUSPENDED_BY_LEGAL\x10\x92\x08\x12\x19\n\x14\x45RROR_QUOTA_EXCEEDED\x10\x93\x08\x12\x32\n-ERROR_INVALID_BINLOG_COORDINATES_IN_DUMP_FILE\x10\x94\x08\x42%\n\x1b\x63om.google.protos.cloud.sql\x10\x02 \x02(\x02P\x01')
 )
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
 
 
@@ -184,6 +188,7 @@
   serialized_start=89,
   serialized_end=1138,
 )
+_sym_db.RegisterEnumDescriptor(_SQLSERVICECLIENTERROR_CLIENTERRORCODE)
 
 
 _SQLSERVICECLIENTERROR = _descriptor.Descriptor(
@@ -215,6 +220,7 @@
   __module__ = 'google.storage.speckle.proto.client_error_code_pb2'
 
   ))
+_sym_db.RegisterMessage(SqlServiceClientError)
 
 
 DESCRIPTOR.has_options = True
diff --git a/google/storage/speckle/proto/client_pb2.py b/google/storage/speckle/proto/client_pb2.py
index d91e6b3..68ef46d 100644
--- a/google/storage/speckle/proto/client_pb2.py
+++ b/google/storage/speckle/proto/client_pb2.py
@@ -23,9 +23,12 @@
 from google.net.proto2.python.public import descriptor as _descriptor
 from google.net.proto2.python.public import message as _message
 from google.net.proto2.python.public import reflection as _reflection
+from google.net.proto2.python.public import symbol_database as _symbol_database
 from google.net.proto2.proto import descriptor_pb2
 
 
+_sym_db = _symbol_database.Default()
+
 
 
 
@@ -34,6 +37,7 @@
   package='speckle',
   serialized_pb=_b('\n\"storage/speckle/proto/client.proto\x12\x07speckle\"\xb6\x01\n\x11\x42indVariableProto\x12\r\n\x05value\x18\x01 \x01(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x05\x12\x10\n\x08position\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12;\n\tdirection\x18\x05 \x01(\x0e\x32$.speckle.BindVariableProto.Direction:\x02IN\"\'\n\tDirection\x12\x06\n\x02IN\x10\x01\x12\x07\n\x03OUT\x10\x02\x12\t\n\x05INOUT\x10\x03\"\x8c\x03\n\x0bResultProto\x12\"\n\x04rows\x18\x01 \x01(\x0b\x32\x14.speckle.RowSetProto\x12\x14\n\x0crows_updated\x18\x02 \x01(\x03\x12\x16\n\x0egenerated_keys\x18\x03 \x03(\x0c\x12\'\n\x08warnings\x18\x04 \x03(\x0b\x32\x15.speckle.SqlException\x12,\n\rsql_exception\x18\x05 \x01(\x0b\x32\x15.speckle.SqlException\x12\x14\n\x0cstatement_id\x18\x06 \x01(\x04\x12\x18\n\tmore_rows\x18\x07 \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0cmore_results\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\x33\n\x0foutput_variable\x18\t \x03(\x0b\x32\x1a.speckle.BindVariableProto\x12\x1a\n\x12\x62\x61tch_rows_updated\x18\n \x03(\x03\x12\x36\n\x12parameter_metadata\x18\x0b \x03(\x0b\x32\x1a.speckle.ParameterMetadata\"\xf1\x05\n\x07OpProto\x12%\n\x04type\x18\x01 \x02(\x0e\x32\x17.speckle.OpProto.OpType\x12\x0f\n\x07\x63\x61talog\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\x12%\n\tsavepoint\x18\x04 \x01(\x0b\x32\x12.speckle.SavePoint\x12\x13\n\x0b\x61uto_commit\x18\x05 \x01(\x08\x12\x11\n\tread_only\x18\x06 \x01(\x08\x12G\n\x1btransaction_isolation_level\x18\x07 \x01(\x0e\x32\".speckle.TransactionIsolationLevel\x12\x14\n\x0cstatement_id\x18\x08 \x01(\x04\x12\x12\n\nrequest_id\x18\t 
\x01(\x04\"\xde\x03\n\x06OpType\x12\x0e\n\nNATIVE_SQL\x10\x01\x12\x0c\n\x08ROLLBACK\x10\x02\x12\x11\n\rSET_SAVEPOINT\x10\x03\x12\x13\n\x0fSET_AUTO_COMMIT\x10\x04\x12\x11\n\rSET_READ_ONLY\x10\x05\x12#\n\x1fSET_TRANSACTION_ISOLATION_LEVEL\x10\x06\x12\n\n\x06\x43OMMIT\x10\x07\x12\x0f\n\x0bSET_CATALOG\x10\x08\x12\x13\n\x0f\x43LOSE_STATEMENT\x10\t\x12\x08\n\x04PING\x10\n\x12\x0f\n\x0bNEXT_RESULT\x10\x0b\x12\t\n\x05RETRY\x10\x0c\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE13\x10\r\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE14\x10\x0e\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE15\x10\x0f\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE16\x10\x10\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE17\x10\x11\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE18\x10\x12\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE19\x10\x13\x12\x1e\n\x1aVALUE_ENUM_UNKNOWN_VALUE20\x10\x14\"%\n\tSavePoint\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x02(\t\"c\n\x0cSqlException\x12\x0f\n\x07message\x18\x01 \x02(\t\x12\x0f\n\x04\x63ode\x18\x02 \x02(\x05:\x01\x30\x12\x11\n\tsql_state\x18\x03 \x01(\t\x12\x1e\n\x16\x61pplication_error_code\x18\x04 \x01(\x05\"+\n\nTupleProto\x12\x0e\n\x06values\x18\x01 \x03(\x0c\x12\r\n\x05nulls\x18\x02 \x03(\x05\"\xc0\x03\n\x0b\x43olumnProto\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\r\n\x05label\x18\x02 \x01(\t\x12\x10\n\x04type\x18\x03 \x01(\x05:\x02\x31\x32\x12\x12\n\ntable_name\x18\x04 \x01(\t\x12\x13\n\x0bschema_name\x18\x05 \x01(\t\x12\x14\n\x0c\x63\x61talog_name\x18\x06 \x01(\t\x12\x14\n\tprecision\x18\x07 \x01(\x05:\x01\x30\x12\x10\n\x05scale\x18\x08 \x01(\x05:\x01\x30\x12\x10\n\x08nullable\x18\t \x01(\x08\x12\x12\n\nsearchable\x18\n \x01(\x08\x12\x14\n\x0c\x64isplay_size\x18\x0b \x01(\x05\x12\x1d\n\x0e\x61uto_increment\x18\x0c \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0e\x63\x61se_sensitive\x18\r \x01(\x08:\x05\x66\x61lse\x12\x17\n\x08\x63urrency\x18\x0e \x01(\x08:\x05\x66\x61lse\x12\"\n\x13\x64\x65\x66initely_writable\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x18\n\tread_only\x18\x10 
\x01(\x08:\x05\x66\x61lse\x12\x15\n\x06signed\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\x17\n\x08writable\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x1a\n\x10\x63olumn_type_name\x18\x13 \x01(\t:\x00\"Y\n\x0bRowSetProto\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.speckle.ColumnProto\x12#\n\x06tuples\x18\x02 \x03(\x0b\x32\x13.speckle.TupleProto\"\xcb\x36\n\x19JdbcDatabaseMetaDataProto\x12*\n\x1b\x61ll_procedures_are_callable\x18\x01 \x01(\x08:\x05\x66\x61lse\x12(\n\x19\x61ll_tables_are_selectable\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x39\n*auto_commit_failure_closes_all_result_sets\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x38\n)data_definition_causes_transaction_commit\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x36\n\'data_definition_ignored_in_transactions\x18\x05 \x01(\x08:\x05\x66\x61lse\x12.\n\x1f\x64oes_max_row_size_include_blobs\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x19\n\x11\x63\x61talog_separator\x18\x07 \x01(\t\x12\x14\n\x0c\x63\x61talog_term\x18\x08 \x01(\t\x12!\n\x16\x64\x61tabase_major_version\x18\t \x01(\x05:\x01\x30\x12!\n\x16\x64\x61tabase_minor_version\x18\n \x01(\x05:\x01\x30\x12&\n\x15\x64\x61tabase_product_name\x18\x0b \x01(\t:\x07Speckle\x12\"\n\x18\x64\x61tabase_product_version\x18\x0c \x01(\t:\x00\x12u\n\x1d\x64\x65\x66\x61ult_transaction_isolation\x18\r \x01(\x0e\x32\".speckle.TransactionIsolationLevel:*TRANSACTIONISOLATIONLEVEL_TRANSACTION_NONE\x12\x1f\n\x15\x65xtra_name_characters\x18\x0e \x01(\t:\x00\x12!\n\x17identifier_quote_string\x18\x0f \x01(\t:\x00\x12\x1d\n\x12jdbc_major_version\x18\x10 \x01(\x05:\x01\x31\x12\x1d\n\x12jdbc_minor_version\x18\x11 \x01(\x05:\x01\x30\x12$\n\x19max_binary_literal_length\x18\x12 \x01(\x05:\x01\x30\x12\"\n\x17max_catalog_name_length\x18\x13 \x01(\x05:\x01\x30\x12\"\n\x17max_char_literal_length\x18\x14 \x01(\x05:\x01\x30\x12!\n\x16max_column_name_length\x18\x15 \x01(\x05:\x01\x30\x12\"\n\x17max_columns_in_group_by\x18\x16 \x01(\x05:\x01\x30\x12\x1f\n\x14max_columns_in_index\x18\x17 
\x01(\x05:\x01\x30\x12\"\n\x17max_columns_in_order_by\x18\x18 \x01(\x05:\x01\x30\x12 \n\x15max_columns_in_select\x18\x19 \x01(\x05:\x01\x30\x12\x1f\n\x14max_columns_in_table\x18\x1a \x01(\x05:\x01\x30\x12\x1a\n\x0fmax_connections\x18\x1b \x01(\x05:\x01\x30\x12!\n\x16max_cursor_name_length\x18\x1c \x01(\x05:\x01\x30\x12\x1b\n\x10max_index_length\x18\x1d \x01(\x05:\x01\x30\x12$\n\x19max_procedure_name_length\x18\x1e \x01(\x05:\x01\x30\x12\x17\n\x0cmax_row_size\x18\x1f \x01(\x05:\x01\x30\x12!\n\x16max_schema_name_length\x18  \x01(\x05:\x01\x30\x12\x1f\n\x14max_statement_length\x18! \x01(\x05:\x01\x30\x12\x19\n\x0emax_statements\x18\" \x01(\x05:\x01\x30\x12 \n\x15max_table_name_length\x18# \x01(\x05:\x01\x30\x12\x1f\n\x14max_tables_in_select\x18$ \x01(\x05:\x01\x30\x12\x1f\n\x14max_user_name_length\x18% \x01(\x05:\x01\x30\x12\x1b\n\x11numeric_functions\x18& \x01(\t:\x00\x12\x18\n\x0eprocedure_term\x18\' \x01(\t:\x00\x12j\n\x15resultset_holdability\x18( \x01(\x0e\x32\x1d.speckle.ResultSetHoldability:,RESULTSETHOLDABILITY_CLOSE_CURSORS_AT_COMMIT\x12i\n\x0erowid_lifetime\x18) \x01(\x0e\x32\x30.speckle.JdbcDatabaseMetaDataProto.RowIdLifetime:\x1fROWIDLIFETIME_ROWID_UNSUPPORTED\x12\x14\n\x0csql_keywords\x18* \x01(\t\x12\x63\n\x0esql_state_type\x18+ \x01(\x0e\x32/.speckle.JdbcDatabaseMetaDataProto.SqlStateType:\x1aSQLSTATETYPE_SQL_STATE_SQL\x12\x15\n\x0bschema_term\x18, \x01(\t:\x00\x12\x1c\n\x14search_string_escape\x18- \x01(\t\x12\x1a\n\x10string_functions\x18. 
\x01(\t:\x00\x12\x1a\n\x10system_functions\x18/ \x01(\t:\x00\x12\x1d\n\x13time_date_functions\x18\x30 \x01(\t:\x00\x12\x13\n\tuser_name\x18\x31 \x01(\t:\x00\x12\x1f\n\x10\x63\x61talog_at_start\x18\x32 \x01(\x08:\x05\x66\x61lse\x12#\n\x14locators_update_copy\x18\x33 \x01(\x08:\x05\x66\x61lse\x12)\n\x1anull_plus_non_null_is_null\x18\x34 \x01(\x08:\x05\x66\x61lse\x12&\n\x17nulls_are_sorted_at_end\x18\x35 \x01(\x08:\x05\x66\x61lse\x12(\n\x19nulls_are_sorted_at_start\x18\x36 \x01(\x08:\x05\x66\x61lse\x12$\n\x15nulls_are_sorted_high\x18\x37 \x01(\x08:\x05\x66\x61lse\x12#\n\x14nulls_are_sorted_low\x18\x38 \x01(\x08:\x05\x66\x61lse\x12,\n\x1dstores_lower_case_identifiers\x18\x39 \x01(\x08:\x05\x66\x61lse\x12\x33\n$stores_lower_case_quoted_identifiers\x18: \x01(\x08:\x05\x66\x61lse\x12,\n\x1dstores_mixed_case_identifiers\x18; \x01(\x08:\x05\x66\x61lse\x12\x33\n$stores_mixed_case_quoted_identifiers\x18< \x01(\x08:\x05\x66\x61lse\x12,\n\x1dstores_upper_case_identifiers\x18= \x01(\x08:\x05\x66\x61lse\x12\x33\n$stores_upper_case_quoted_identifiers\x18> \x01(\x08:\x05\x66\x61lse\x12.\n\x1fsupports_ansi92_entry_level_sql\x18? 
\x01(\x08:\x05\x66\x61lse\x12\'\n\x18supports_ansi92_full_sql\x18@ \x01(\x08:\x05\x66\x61lse\x12/\n supports_ansi92_intermediate_sql\x18\x41 \x01(\x08:\x05\x66\x61lse\x12\x33\n$supports_alter_table_with_add_column\x18\x42 \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_alter_table_with_drop_column\x18\x43 \x01(\x08:\x05\x66\x61lse\x12%\n\x16supports_batch_updates\x18\x44 \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_catalogs_in_data_manipulation\x18\x45 \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_catalogs_in_index_definitions\x18\x46 \x01(\x08:\x05\x66\x61lse\x12\x39\n*supports_catalogs_in_privilege_definitions\x18G \x01(\x08:\x05\x66\x61lse\x12\x33\n$supports_catalogs_in_procedure_calls\x18H \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_catalogs_in_table_definitions\x18I \x01(\x08:\x05\x66\x61lse\x12\'\n\x18supports_column_aliasing\x18J \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10supports_convert\x18K \x01(\x08:\x05\x66\x61lse\x12(\n\x19supports_core_sql_grammar\x18L \x01(\x08:\x05\x66\x61lse\x12-\n\x1esupports_correlated_subqueries\x18M \x01(\x08:\x05\x66\x61lse\x12J\n;supports_data_definition_and_data_manipulation_transactions\x18N \x01(\x08:\x05\x66\x61lse\x12;\n,supports_data_manipulation_transactions_only\x18O \x01(\x08:\x05\x66\x61lse\x12\x39\n*supports_different_table_correlation_names\x18P \x01(\x08:\x05\x66\x61lse\x12/\n supports_expressions_in_order_by\x18Q \x01(\x08:\x05\x66\x61lse\x12,\n\x1dsupports_extended_sql_grammar\x18R \x01(\x08:\x05\x66\x61lse\x12(\n\x19supports_full_outer_joins\x18S \x01(\x08:\x05\x66\x61lse\x12*\n\x1bsupports_get_generated_keys\x18T \x01(\x08:\x05\x66\x61lse\x12 \n\x11supports_group_by\x18U \x01(\x08:\x05\x66\x61lse\x12.\n\x1fsupports_group_by_beyond_select\x18V \x01(\x08:\x05\x66\x61lse\x12*\n\x1bsupports_group_by_unrelated\x18W \x01(\x08:\x05\x66\x61lse\x12\x36\n\'supports_integrity_enhancement_facility\x18X \x01(\x08:\x05\x66\x61lse\x12*\n\x1bsupports_like_escape_clause\x18Y 
\x01(\x08:\x05\x66\x61lse\x12+\n\x1csupports_limited_outer_joins\x18Z \x01(\x08:\x05\x66\x61lse\x12+\n\x1csupports_minimum_sql_grammar\x18[ \x01(\x08:\x05\x66\x61lse\x12.\n\x1fsupports_mixed_case_identifiers\x18\\ \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_mixed_case_quoted_identifiers\x18] \x01(\x08:\x05\x66\x61lse\x12-\n\x1esupports_multiple_open_results\x18^ \x01(\x08:\x05\x66\x61lse\x12,\n\x1dsupports_multiple_result_sets\x18_ \x01(\x08:\x05\x66\x61lse\x12-\n\x1esupports_multiple_transactions\x18` \x01(\x08:\x05\x66\x61lse\x12(\n\x19supports_named_parameters\x18\x61 \x01(\x08:\x05\x66\x61lse\x12,\n\x1dsupports_non_nullable_columns\x18\x62 \x01(\x08:\x05\x66\x61lse\x12\x32\n#supports_open_cursors_across_commit\x18\x63 \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_open_cursors_across_rollback\x18\x64 \x01(\x08:\x05\x66\x61lse\x12\x35\n&supports_open_statements_across_commit\x18\x65 \x01(\x08:\x05\x66\x61lse\x12\x37\n(supports_open_statements_across_rollback\x18\x66 \x01(\x08:\x05\x66\x61lse\x12*\n\x1bsupports_order_by_unrelated\x18g \x01(\x08:\x05\x66\x61lse\x12#\n\x14supports_outer_joins\x18h \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_positioned_delete\x18i \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_positioned_update\x18j \x01(\x08:\x05\x66\x61lse\x12\"\n\x13supports_savepoints\x18k \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_schemas_in_data_manipulation\x18l \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_schemas_in_index_definitions\x18m \x01(\x08:\x05\x66\x61lse\x12\x38\n)supports_schemas_in_privilege_definitions\x18n \x01(\x08:\x05\x66\x61lse\x12\x32\n#supports_schemas_in_procedure_calls\x18o \x01(\x08:\x05\x66\x61lse\x12\x34\n%supports_schemas_in_table_definitions\x18p \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_select_for_update\x18q \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_statement_pooling\x18r \x01(\x08:\x05\x66\x61lse\x12:\n+supports_stored_functions_using_call_syntax\x18s \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_stored_procedures\x18t 
\x01(\x08:\x05\x66\x61lse\x12\x31\n\"supports_subqueries_in_comparisons\x18u \x01(\x08:\x05\x66\x61lse\x12,\n\x1dsupports_subqueries_in_exists\x18v \x01(\x08:\x05\x66\x61lse\x12)\n\x1asupports_subqueries_in_ins\x18w \x01(\x08:\x05\x66\x61lse\x12\x31\n\"supports_subqueries_in_quantifieds\x18x \x01(\x08:\x05\x66\x61lse\x12/\n supports_table_correlation_names\x18y \x01(\x08:\x05\x66\x61lse\x12$\n\x15supports_transactions\x18z \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0esupports_union\x18{ \x01(\x08:\x05\x66\x61lse\x12!\n\x12supports_union_all\x18| \x01(\x08:\x05\x66\x61lse\x12(\n\x19uses_local_file_per_table\x18} \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10uses_local_files\x18~ \x01(\x08:\x05\x66\x61lse\x12\x18\n\tread_only\x18\x7f \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0btable_types\x18\x80\x01 \x03(\t\x12\x11\n\x08\x63\x61talogs\x18\x81\x01 \x03(\t\x12;\n\x07schemas\x18\x82\x01 \x03(\x0b\x32).speckle.JdbcDatabaseMetaDataProto.Schema\x12\x35\n\x14\x64\x65letes_are_detected\x18\x83\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x35\n\x14inserts_are_detected\x18\x84\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x35\n\x14updates_are_detected\x18\x85\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12;\n\x1aothers_deletes_are_visible\x18\x86\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12;\n\x1aothers_inserts_are_visible\x18\x87\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12;\n\x1aothers_updates_are_visible\x18\x88\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x38\n\x17own_deletes_are_visible\x18\x89\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x38\n\x17own_inserts_are_visible\x18\x8a\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x38\n\x17own_updates_are_visible\x18\x8b\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12J\n)supports_result_set_concurrency_updatable\x18\x8c\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12\x39\n\x18supports_result_set_type\x18\x8d\x01 \x03(\x0e\x32\x16.speckle.ResultSetType\x12G\n\x1fsupports_result_set_holdability\x18\x8e\x01 
\x03(\x0e\x32\x1d.speckle.ResultSetHoldability\x12Q\n$supports_transaction_isolation_level\x18\x8f\x01 \x03(\x0e\x32\".speckle.TransactionIsolationLevel\x12-\n\x1dgenerated_key_always_returned\x18\x90\x01 \x01(\x08:\x05\x66\x61lse\x1a\x35\n\x06Schema\x12\x14\n\x0ctable_schema\x18\x01 \x01(\t\x12\x15\n\rtable_catalog\x18\x02 \x01(\t\"\xd2\x01\n\rRowIdLifetime\x12#\n\x1fROWIDLIFETIME_ROWID_UNSUPPORTED\x10\x00\x12%\n!ROWIDLIFETIME_ROWID_VALID_FOREVER\x10\x01\x12#\n\x1fROWIDLIFETIME_ROWID_VALID_OTHER\x10\x02\x12%\n!ROWIDLIFETIME_ROWID_VALID_SESSION\x10\x03\x12)\n%ROWIDLIFETIME_ROWID_VALID_TRANSACTION\x10\x04\"r\n\x0cSqlStateType\x12\x1e\n\x1aSQLSTATETYPE_SQL_STATE_SQL\x10\x00\x12 \n\x1cSQLSTATETYPE_SQL_STATE_SQL99\x10\x01\x12 \n\x1cSQLSTATETYPE_SQL_STATE_XOPEN\x10\x02\"&\n\x08Property\x12\x0b\n\x03key\x18\x01 \x02(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\xd6\x03\n\x0b\x45xecOptions\x12%\n\x16include_generated_keys\x18\x01 \x01(\x08:\x05\x66\x61lse\x12 \n\x18generated_column_indices\x18\x02 \x03(\x05\x12\x1e\n\x16generated_column_names\x18\x03 \x03(\t\x12$\n\x04type\x18\x04 \x01(\x0e\x32\x16.speckle.ResultSetType\x12\x32\n\x0b\x63oncurrency\x18\x05 \x01(\x0e\x32\x1d.speckle.ResultSetConcurrency\x12\x32\n\x0bholdability\x18\x06 \x01(\x0e\x32\x1d.speckle.ResultSetHoldability\x12\x12\n\nfetch_size\x18\x07 \x01(\x05\x12\x10\n\x08max_rows\x18\x08 \x01(\x05\x12\x17\n\x08poolable\x18\t \x01(\x08:\x05\x66\x61lse\x12?\n\x0f\x66\x65tch_direction\x18\n \x01(\x0e\x32\x17.speckle.FetchDirection:\rFETCH_FORWARD\x12\x13\n\x0b\x63ursor_name\x18\x0b \x01(\t\x12\x19\n\x0emax_field_size\x18\x0c \x01(\x05:\x01\x30\x12 \n\x11\x65scape_processing\x18\r \x01(\x08:\x05\x66\x61lse\"K\n\x16\x42\x61tchBindVariableProto\x12\x31\n\rbind_variable\x18\x01 \x03(\x0b\x32\x1a.speckle.BindVariableProto\"]\n\nBatchProto\x12\x11\n\tstatement\x18\x01 \x03(\t\x12<\n\x13\x62\x61tch_bind_variable\x18\x02 \x03(\x0b\x32\x1f.speckle.BatchBindVariableProto\"!\n\x11ParameterMetadata\x12\x0c\n\x04name\x18\x01 
\x01(\t\":\n\rRpcErrorProto\x12\x12\n\nerror_code\x18\x01 \x01(\x05\x12\x15\n\rerror_message\x18\x02 \x01(\t*\xb4\x02\n\x19TransactionIsolationLevel\x12.\n*TRANSACTIONISOLATIONLEVEL_TRANSACTION_NONE\x10\x00\x12\x38\n4TRANSACTIONISOLATIONLEVEL_TRANSACTION_READ_COMMITTED\x10\x02\x12:\n6TRANSACTIONISOLATIONLEVEL_TRANSACTION_READ_UNCOMMITTED\x10\x01\x12\x39\n5TRANSACTIONISOLATIONLEVEL_TRANSACTION_REPEATABLE_READ\x10\x04\x12\x36\n2TRANSACTIONISOLATIONLEVEL_TRANSACTION_SERIALIZABLE\x10\x08*\x8b\x01\n\rResultSetType\x12$\n\x1fRESULTSETTYPE_TYPE_FORWARD_ONLY\x10\xeb\x07\x12*\n%RESULTSETTYPE_TYPE_SCROLL_INSENSITIVE\x10\xec\x07\x12(\n#RESULTSETTYPE_TYPE_SCROLL_SENSITIVE\x10\xed\x07*n\n\x14ResultSetConcurrency\x12*\n%RESULTSETCONCURRENCY_CONCUR_READ_ONLY\x10\xef\x07\x12*\n%RESULTSETCONCURRENCY_CONCUR_UPDATABLE\x10\xf0\x07*{\n\x14ResultSetHoldability\x12\x31\n-RESULTSETHOLDABILITY_HOLD_CURSORS_OVER_COMMIT\x10\x01\x12\x30\n,RESULTSETHOLDABILITY_CLOSE_CURSORS_AT_COMMIT\x10\x02*L\n\x0e\x46\x65tchDirection\x12\x12\n\rFETCH_FORWARD\x10\xe8\x07\x12\x12\n\rFETCH_REVERSE\x10\xe9\x07\x12\x12\n\rFETCH_UNKNOWN\x10\xea\x07*\xc4\t\n\x0cMetadataType\x12(\n$METADATATYPE_DATABASE_METADATA_BASIC\x10\x01\x12-\n)METADATATYPE_DATABASE_METADATA_GET_TABLES\x10\x02\x12\x31\n-METADATATYPE_DATABASE_METADATA_GET_PROCEDURES\x10\x03\x12\x38\n4METADATATYPE_DATABASE_METADATA_GET_PROCEDURE_COLUMNS\x10\x04\x12.\n*METADATATYPE_DATABASE_METADATA_GET_COLUMNS\x10\x05\x12\x38\n4METADATATYPE_DATABASE_METADATA_GET_COLUMN_PRIVILEGES\x10\x06\x12\x37\n3METADATATYPE_DATABASE_METADATA_GET_TABLE_PRIVILEGES\x10\x07\x12:\n6METADATATYPE_DATABASE_METADATA_GET_BEST_ROW_IDENTIFIER\x10\x08\x12\x36\n2METADATATYPE_DATABASE_METADATA_GET_VERSION_COLUMNS\x10\t\x12\x33\n/METADATATYPE_DATABASE_METADATA_GET_PRIMARY_KEYS\x10\n\x12\x34\n0METADATATYPE_DATABASE_METADATA_GET_IMPORTED_KEYS\x10\x0b\x12\x34\n0METADATATYPE_DATABASE_METADATA_GET_EXPORTED_KEYS\x10\x0c\x12\x36\n2METADATATYPE_DATABASE_METADATA_GET_CROSS_REFERENCE\x10\r\x12\x31\n-MET
ADATATYPE_DATABASE_METADATA_GET_INDEX_INFO\x10\x0e\x12+\n\'METADATATYPE_DATABASE_METADATA_GET_UDTS\x10\x0f\x12\x32\n.METADATATYPE_DATABASE_METADATA_GET_SUPER_TYPES\x10\x10\x12\x33\n/METADATATYPE_DATABASE_METADATA_GET_SUPER_TABLES\x10\x11\x12\x31\n-METADATATYPE_DATABASE_METADATA_GET_ATTRIBUTES\x10\x12\x12\x30\n,METADATATYPE_DATABASE_METADATA_GET_FUNCTIONS\x10\x13\x12\x37\n3METADATATYPE_DATABASE_METADATA_GET_FUNCTION_COLUMNS\x10\x14\x12\x30\n,METADATATYPE_DATABASE_METADATA_GET_TYPE_INFO\x10\x15\x12.\n*METADATATYPE_DATABASE_METADATA_GET_SCHEMAS\x10\x16\x12\x35\n1METADATATYPE_DATABASE_METADATA_GET_PSEUDO_COLUMNS\x10\x17*\xdb\x02\n\nClientType\x12\x19\n\x15\x43LIENT_TYPE_JAVA_JDBC\x10\x01\x12\x1c\n\x18\x43LIENT_TYPE_PYTHON_DBAPI\x10\x02\x12\x17\n\x13\x43LIENT_TYPE_UNKNOWN\x10\x03\x12\x12\n\x0e\x43LIENT_TYPE_GO\x10\x04\x12\x1e\n\x1a\x43LIENT_TYPE_EXPERIMENTAL_1\x10\x05\x12\x16\n\x12\x43LIENT_TYPE_NATIVE\x10\x06\x12!\n\x1d\x43LIENT_TYPE_UNKNOWN_LANGUAGE7\x10\x07\x12!\n\x1d\x43LIENT_TYPE_UNKNOWN_LANGUAGE8\x10\x08\x12!\n\x1d\x43LIENT_TYPE_UNKNOWN_LANGUAGE9\x10\t\x12\"\n\x1e\x43LIENT_TYPE_UNKNOWN_LANGUAGE10\x10\n\x12\"\n\x1e\x43LIENT_TYPE_UNKNOWN_LANGUAGE11\x10\x0b\x42#\n\x1b\x63om.google.protos.cloud.sql\x10\x02 \x02(\x02')
 )
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
 _TRANSACTIONISOLATIONLEVEL = _descriptor.EnumDescriptor(
   name='TransactionIsolationLevel',
@@ -67,6 +71,7 @@
   serialized_start=9885,
   serialized_end=10193,
 )
+_sym_db.RegisterEnumDescriptor(_TRANSACTIONISOLATIONLEVEL)
 
 TransactionIsolationLevel = enum_type_wrapper.EnumTypeWrapper(_TRANSACTIONISOLATIONLEVEL)
 _RESULTSETTYPE = _descriptor.EnumDescriptor(
@@ -93,6 +98,7 @@
   serialized_start=10196,
   serialized_end=10335,
 )
+_sym_db.RegisterEnumDescriptor(_RESULTSETTYPE)
 
 ResultSetType = enum_type_wrapper.EnumTypeWrapper(_RESULTSETTYPE)
 _RESULTSETCONCURRENCY = _descriptor.EnumDescriptor(
@@ -115,6 +121,7 @@
   serialized_start=10337,
   serialized_end=10447,
 )
+_sym_db.RegisterEnumDescriptor(_RESULTSETCONCURRENCY)
 
 ResultSetConcurrency = enum_type_wrapper.EnumTypeWrapper(_RESULTSETCONCURRENCY)
 _RESULTSETHOLDABILITY = _descriptor.EnumDescriptor(
@@ -137,6 +144,7 @@
   serialized_start=10449,
   serialized_end=10572,
 )
+_sym_db.RegisterEnumDescriptor(_RESULTSETHOLDABILITY)
 
 ResultSetHoldability = enum_type_wrapper.EnumTypeWrapper(_RESULTSETHOLDABILITY)
 _FETCHDIRECTION = _descriptor.EnumDescriptor(
@@ -163,6 +171,7 @@
   serialized_start=10574,
   serialized_end=10650,
 )
+_sym_db.RegisterEnumDescriptor(_FETCHDIRECTION)
 
 FetchDirection = enum_type_wrapper.EnumTypeWrapper(_FETCHDIRECTION)
 _METADATATYPE = _descriptor.EnumDescriptor(
@@ -269,6 +278,7 @@
   serialized_start=10653,
   serialized_end=11873,
 )
+_sym_db.RegisterEnumDescriptor(_METADATATYPE)
 
 MetadataType = enum_type_wrapper.EnumTypeWrapper(_METADATATYPE)
 _CLIENTTYPE = _descriptor.EnumDescriptor(
@@ -327,6 +337,7 @@
   serialized_start=11876,
   serialized_end=12223,
 )
+_sym_db.RegisterEnumDescriptor(_CLIENTTYPE)
 
 ClientType = enum_type_wrapper.EnumTypeWrapper(_CLIENTTYPE)
 TRANSACTIONISOLATIONLEVEL_TRANSACTION_NONE = 0
@@ -404,6 +415,7 @@
   serialized_start=191,
   serialized_end=230,
 )
+_sym_db.RegisterEnumDescriptor(_BINDVARIABLEPROTO_DIRECTION)
 
 _OPPROTO_OPTYPE = _descriptor.EnumDescriptor(
   name='OpType',
@@ -497,6 +509,7 @@
   serialized_start=907,
   serialized_end=1385,
 )
+_sym_db.RegisterEnumDescriptor(_OPPROTO_OPTYPE)
 
 _JDBCDATABASEMETADATAPROTO_ROWIDLIFETIME = _descriptor.EnumDescriptor(
   name='RowIdLifetime',
@@ -530,6 +543,7 @@
   serialized_start=8776,
   serialized_end=8986,
 )
+_sym_db.RegisterEnumDescriptor(_JDBCDATABASEMETADATAPROTO_ROWIDLIFETIME)
 
 _JDBCDATABASEMETADATAPROTO_SQLSTATETYPE = _descriptor.EnumDescriptor(
   name='SqlStateType',
@@ -555,6 +569,7 @@
   serialized_start=8988,
   serialized_end=9102,
 )
+_sym_db.RegisterEnumDescriptor(_JDBCDATABASEMETADATAPROTO_SQLSTATETYPE)
 
 
 _BINDVARIABLEPROTO = _descriptor.Descriptor(
@@ -2510,48 +2525,56 @@
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(BindVariableProto)
 
 ResultProto = _reflection.GeneratedProtocolMessageType('ResultProto', (_message.Message,), dict(
   DESCRIPTOR = _RESULTPROTO,
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(ResultProto)
 
 OpProto = _reflection.GeneratedProtocolMessageType('OpProto', (_message.Message,), dict(
   DESCRIPTOR = _OPPROTO,
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(OpProto)
 
 SavePoint = _reflection.GeneratedProtocolMessageType('SavePoint', (_message.Message,), dict(
   DESCRIPTOR = _SAVEPOINT,
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(SavePoint)
 
 SqlException = _reflection.GeneratedProtocolMessageType('SqlException', (_message.Message,), dict(
   DESCRIPTOR = _SQLEXCEPTION,
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(SqlException)
 
 TupleProto = _reflection.GeneratedProtocolMessageType('TupleProto', (_message.Message,), dict(
   DESCRIPTOR = _TUPLEPROTO,
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(TupleProto)
 
 ColumnProto = _reflection.GeneratedProtocolMessageType('ColumnProto', (_message.Message,), dict(
   DESCRIPTOR = _COLUMNPROTO,
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(ColumnProto)
 
 RowSetProto = _reflection.GeneratedProtocolMessageType('RowSetProto', (_message.Message,), dict(
   DESCRIPTOR = _ROWSETPROTO,
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(RowSetProto)
 
 JdbcDatabaseMetaDataProto = _reflection.GeneratedProtocolMessageType('JdbcDatabaseMetaDataProto', (_message.Message,), dict(
 
@@ -2565,42 +2588,50 @@
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(JdbcDatabaseMetaDataProto)
+_sym_db.RegisterMessage(JdbcDatabaseMetaDataProto.Schema)
 
 Property = _reflection.GeneratedProtocolMessageType('Property', (_message.Message,), dict(
   DESCRIPTOR = _PROPERTY,
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(Property)
 
 ExecOptions = _reflection.GeneratedProtocolMessageType('ExecOptions', (_message.Message,), dict(
   DESCRIPTOR = _EXECOPTIONS,
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(ExecOptions)
 
 BatchBindVariableProto = _reflection.GeneratedProtocolMessageType('BatchBindVariableProto', (_message.Message,), dict(
   DESCRIPTOR = _BATCHBINDVARIABLEPROTO,
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(BatchBindVariableProto)
 
 BatchProto = _reflection.GeneratedProtocolMessageType('BatchProto', (_message.Message,), dict(
   DESCRIPTOR = _BATCHPROTO,
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(BatchProto)
 
 ParameterMetadata = _reflection.GeneratedProtocolMessageType('ParameterMetadata', (_message.Message,), dict(
   DESCRIPTOR = _PARAMETERMETADATA,
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(ParameterMetadata)
 
 RpcErrorProto = _reflection.GeneratedProtocolMessageType('RpcErrorProto', (_message.Message,), dict(
   DESCRIPTOR = _RPCERRORPROTO,
   __module__ = 'google.storage.speckle.proto.client_pb2'
 
   ))
+_sym_db.RegisterMessage(RpcErrorProto)
 
 
 DESCRIPTOR.has_options = True
diff --git a/google/storage/speckle/proto/sql_pb2.py b/google/storage/speckle/proto/sql_pb2.py
index 6ca1c50..e7bca80 100644
--- a/google/storage/speckle/proto/sql_pb2.py
+++ b/google/storage/speckle/proto/sql_pb2.py
@@ -22,6 +22,7 @@
 from google.net.proto2.python.public import descriptor as _descriptor
 from google.net.proto2.python.public import message as _message
 from google.net.proto2.python.public import reflection as _reflection
+from google.net.proto2.python.public import symbol_database as _symbol_database
 from google.net.proto2.proto import descriptor_pb2
 import sys
 try:
@@ -40,6 +41,8 @@
   _server_stub_base_class = object
 
 
+_sym_db = _symbol_database.Default()
+
 
 import google.storage.speckle.proto.client_pb2
 
@@ -50,6 +53,7 @@
   serialized_pb=_b('\n\x1fstorage/speckle/proto/sql.proto\x12\x0bspeckle.sql\x1a\"storage/speckle/proto/client.proto\"\x8c\x03\n\x0b\x45xecRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12\x14\n\x0cstatement_id\x18\x02 \x01(\x04\x12\x11\n\tstatement\x18\x03 \x01(\t\x12\x31\n\rbind_variable\x18\x04 \x03(\x0b\x32\x1a.speckle.BindVariableProto\x12\x15\n\rconnection_id\x18\x05 \x02(\x0c\x12%\n\x07options\x18\x06 \x01(\x0b\x32\x14.speckle.ExecOptions\x12I\n\x0estatement_type\x18\t \x01(\x0e\x32&.speckle.sql.ExecRequest.StatementType:\tSTATEMENT\x12\"\n\x05\x62\x61tch\x18\n \x01(\x0b\x32\x13.speckle.BatchProto\x12\x12\n\nrequest_id\x18\x0b \x01(\x04\"N\n\rStatementType\x12\r\n\tSTATEMENT\x10\x01\x12\x16\n\x12PREPARED_STATEMENT\x10\x02\x12\x16\n\x12\x43\x41LLABLE_STATEMENT\x10\x03\"b\n\x0c\x45xecResponse\x12$\n\x06result\x18\x01 \x01(\x0b\x32\x14.speckle.ResultProto\x12,\n\rsql_exception\x18\x02 \x01(\x0b\x32\x15.speckle.SqlException\"j\n\rExecOpRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12\x15\n\rconnection_id\x18\x02 \x02(\x0c\x12\x1c\n\x02op\x18\x03 \x02(\x0b\x32\x10.speckle.OpProto\x12\x12\n\nrequest_id\x18\x08 \x01(\x04\"\xed\x01\n\x0e\x45xecOpResponse\x12\x12\n\nnative_sql\x18\x01 \x01(\t\x12%\n\tsavepoint\x18\x02 \x01(\x0b\x32\x12.speckle.SavePoint\x12,\n\rsql_exception\x18\x03 \x01(\x0b\x32\x15.speckle.SqlException\x12$\n\x06result\x18\x04 \x01(\x0b\x32\x14.speckle.ResultProto\x12\x30\n\x10\x63\x61\x63hed_rpc_error\x18\x05 \x01(\x0b\x32\x16.speckle.RpcErrorProto\x12\x1a\n\x0e\x63\x61\x63hed_payload\x18\x06 \x01(\x0c\x42\x02\x08\x01\"\xaa\x01\n\x0fMetadataRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12\'\n\x08metadata\x18\x03 \x02(\x0e\x32\x15.speckle.MetadataType\x12\x31\n\rbind_variable\x18\x04 \x03(\x0b\x32\x1a.speckle.BindVariableProto\x12\x15\n\rconnection_id\x18\x05 \x02(\x0c\x12\x12\n\nrequest_id\x18\x08 \x01(\x04\"\xaa\x01\n\x10MetadataResponse\x12$\n\x06result\x18\x01 \x01(\x0b\x32\x14.speckle.ResultProto\x12\x42\n\x16jdbc_database_metadata\x18\x02 
\x01(\x0b\x32\".speckle.JdbcDatabaseMetaDataProto\x12,\n\rsql_exception\x18\x03 \x01(\x0b\x32\x15.speckle.SqlException\"\xac\x01\n\x15OpenConnectionRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12#\n\x08property\x18\x02 \x03(\x0b\x32\x11.speckle.Property\x12\x1b\n\x10protocol_version\x18\x05 \x01(\x04:\x01\x31\x12?\n\x0b\x63lient_type\x18\x06 \x01(\x0e\x32\x13.speckle.ClientType:\x15\x43LIENT_TYPE_JAVA_JDBC\"\x86\x01\n\x16OpenConnectionResponse\x12\x15\n\rconnection_id\x18\x01 \x01(\x0c\x12,\n\rsql_exception\x18\x02 \x01(\x0b\x32\x15.speckle.SqlException\x12\'\n\x08warnings\x18\x06 \x03(\x0b\x32\x15.speckle.SqlException\"A\n\x16\x43loseConnectionRequest\x12\x10\n\x08instance\x18\x01 \x02(\t\x12\x15\n\rconnection_id\x18\x02 \x02(\x0c\"G\n\x17\x43loseConnectionResponse\x12,\n\rsql_exception\x18\x01 \x01(\x0b\x32\x15.speckle.SqlException2\xac\x03\n\nSqlService\x12?\n\x04\x45xec\x12\x18.speckle.sql.ExecRequest\x1a\x19.speckle.sql.ExecResponse\"\x02P\x01\x12\x45\n\x06\x45xecOp\x12\x1a.speckle.sql.ExecOpRequest\x1a\x1b.speckle.sql.ExecOpResponse\"\x02P\x01\x12N\n\x0bGetMetadata\x12\x1c.speckle.sql.MetadataRequest\x1a\x1d.speckle.sql.MetadataResponse\"\x02P\x01\x12\x64\n\x0eOpenConnection\x12\".speckle.sql.OpenConnectionRequest\x1a#.speckle.sql.OpenConnectionResponse\"\tP\x01\x9a\x01\x04read\x12`\n\x0f\x43loseConnection\x12#.speckle.sql.CloseConnectionRequest\x1a$.speckle.sql.CloseConnectionResponse\"\x02P\x01\x42.\n\x1b\x63om.google.protos.cloud.sql\x10\x02 \x02(\x02P\x01\x80\x01\x00\x88\x01\x00\x90\x01\x00')
   ,
   dependencies=[google.storage.speckle.proto.client_pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
 
 
@@ -77,6 +81,7 @@
   serialized_start=403,
   serialized_end=481,
 )
+_sym_db.RegisterEnumDescriptor(_EXECREQUEST_STATEMENTTYPE)
 
 
 _EXECREQUEST = _descriptor.Descriptor(
@@ -600,60 +605,70 @@
   __module__ = 'google.storage.speckle.proto.sql_pb2'
 
   ))
+_sym_db.RegisterMessage(ExecRequest)
 
 ExecResponse = _reflection.GeneratedProtocolMessageType('ExecResponse', (_message.Message,), dict(
   DESCRIPTOR = _EXECRESPONSE,
   __module__ = 'google.storage.speckle.proto.sql_pb2'
 
   ))
+_sym_db.RegisterMessage(ExecResponse)
 
 ExecOpRequest = _reflection.GeneratedProtocolMessageType('ExecOpRequest', (_message.Message,), dict(
   DESCRIPTOR = _EXECOPREQUEST,
   __module__ = 'google.storage.speckle.proto.sql_pb2'
 
   ))
+_sym_db.RegisterMessage(ExecOpRequest)
 
 ExecOpResponse = _reflection.GeneratedProtocolMessageType('ExecOpResponse', (_message.Message,), dict(
   DESCRIPTOR = _EXECOPRESPONSE,
   __module__ = 'google.storage.speckle.proto.sql_pb2'
 
   ))
+_sym_db.RegisterMessage(ExecOpResponse)
 
 MetadataRequest = _reflection.GeneratedProtocolMessageType('MetadataRequest', (_message.Message,), dict(
   DESCRIPTOR = _METADATAREQUEST,
   __module__ = 'google.storage.speckle.proto.sql_pb2'
 
   ))
+_sym_db.RegisterMessage(MetadataRequest)
 
 MetadataResponse = _reflection.GeneratedProtocolMessageType('MetadataResponse', (_message.Message,), dict(
   DESCRIPTOR = _METADATARESPONSE,
   __module__ = 'google.storage.speckle.proto.sql_pb2'
 
   ))
+_sym_db.RegisterMessage(MetadataResponse)
 
 OpenConnectionRequest = _reflection.GeneratedProtocolMessageType('OpenConnectionRequest', (_message.Message,), dict(
   DESCRIPTOR = _OPENCONNECTIONREQUEST,
   __module__ = 'google.storage.speckle.proto.sql_pb2'
 
   ))
+_sym_db.RegisterMessage(OpenConnectionRequest)
 
 OpenConnectionResponse = _reflection.GeneratedProtocolMessageType('OpenConnectionResponse', (_message.Message,), dict(
   DESCRIPTOR = _OPENCONNECTIONRESPONSE,
   __module__ = 'google.storage.speckle.proto.sql_pb2'
 
   ))
+_sym_db.RegisterMessage(OpenConnectionResponse)
 
 CloseConnectionRequest = _reflection.GeneratedProtocolMessageType('CloseConnectionRequest', (_message.Message,), dict(
   DESCRIPTOR = _CLOSECONNECTIONREQUEST,
   __module__ = 'google.storage.speckle.proto.sql_pb2'
 
   ))
+_sym_db.RegisterMessage(CloseConnectionRequest)
 
 CloseConnectionResponse = _reflection.GeneratedProtocolMessageType('CloseConnectionResponse', (_message.Message,), dict(
   DESCRIPTOR = _CLOSECONNECTIONRESPONSE,
   __module__ = 'google.storage.speckle.proto.sql_pb2'
 
   ))
+_sym_db.RegisterMessage(CloseConnectionResponse)
 
 
 DESCRIPTOR.has_options = True
diff --git a/lib/cacerts/urlfetch_cacerts.txt b/lib/cacerts/urlfetch_cacerts.txt
index 0c61953..c1f65b2 100644
--- a/lib/cacerts/urlfetch_cacerts.txt
+++ b/lib/cacerts/urlfetch_cacerts.txt
@@ -31808,28 +31808,6 @@
 G0pID0nsP9iH2xyG+8F+Fxcxt7ve0T2YGYfgMg==
 -----END CERTIFICATE-----
 
-subject= /C=ZA/O=Thawte Consulting (Pty) Ltd./CN=Thawte SGC CA
-serial=30000002
------BEGIN CERTIFICATE-----
-MIIDIzCCAoygAwIBAgIEMAAAAjANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJV
-UzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xNzA1BgNVBAsTLkNsYXNzIDMgUHVi
-bGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQwNTEzMDAw
-MDAwWhcNMTQwNTEyMjM1OTU5WjBMMQswCQYDVQQGEwJaQTElMCMGA1UEChMcVGhh
-d3RlIENvbnN1bHRpbmcgKFB0eSkgTHRkLjEWMBQGA1UEAxMNVGhhd3RlIFNHQyBD
-QTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA1NNn0I0Vf67NMf59HZGhPwtx
-PKzMyGT7Y/wySweUvW+Aui/hBJPAM/wJMyPpC3QrccQDxtLN4i/1CWPN/0ilAL/g
-5/OIty0y3pg25gqtAHvEZEo7hHUD8nCSfQ5i9SGraTaEMXWQ+L/HbIgbBpV8yeWo
-3nWhLHpo39XKHIdYYBkCAwEAAaOB/jCB+zASBgNVHRMBAf8ECDAGAQH/AgEAMAsG
-A1UdDwQEAwIBBjARBglghkgBhvhCAQEEBAMCAQYwKAYDVR0RBCEwH6QdMBsxGTAX
-BgNVBAMTEFByaXZhdGVMYWJlbDMtMTUwMQYDVR0fBCowKDAmoCSgIoYgaHR0cDov
-L2NybC52ZXJpc2lnbi5jb20vcGNhMy5jcmwwMgYIKwYBBQUHAQEEJjAkMCIGCCsG
-AQUFBzABhhZodHRwOi8vb2NzcC50aGF3dGUuY29tMDQGA1UdJQQtMCsGCCsGAQUF
-BwMBBggrBgEFBQcDAgYJYIZIAYb4QgQBBgpghkgBhvhFAQgBMA0GCSqGSIb3DQEB
-BQUAA4GBAFWsY+reod3SkF+fC852vhNRj5PZBSvIG3dLrWlQoe7e3P3bB+noOZTc
-q3J5Lwa/q4FwxKjt6lM07e8eU9kGx1Yr0Vz00YqOtCuxN5BICEIlxT6Ky3/rbwTR
-bcV0oveifHtgPHfNDs5IAn8BL7abN+AqKjbc1YXWrOU/VG+WHgWv
------END CERTIFICATE-----
-
 subject= /O=Thawte/OU=Thawte Universal CA Root/CN=Thawte Universal CA Root
 serial=00
 -----BEGIN CERTIFICATE-----
@@ -34078,30 +34056,6 @@
 GU+5JrG8vCyy4YGms2G19EVgLyx1xcgtiEsmu3DuO38BLQ==
 -----END CERTIFICATE-----
 
-subject= /O=eSign Australia/OU=Gatekeeper PKI/CN=Gatekeeper Root CA
-serial=728F0D4E8F154E8A3F3FD9C5B59FA164
------BEGIN CERTIFICATE-----
-MIIDijCCAnKgAwIBAgIQco8NTo8VToo/P9nFtZ+hZDANBgkqhkiG9w0BAQQFADBQ
-MRgwFgYDVQQKEw9lU2lnbiBBdXN0cmFsaWExFzAVBgNVBAsTDkdhdGVrZWVwZXIg
-UEtJMRswGQYDVQQDExJHYXRla2VlcGVyIFJvb3QgQ0EwHhcNMDIwNTI0MDAwMDAw
-WhcNMTQwNTIzMjM1OTU5WjBQMRgwFgYDVQQKEw9lU2lnbiBBdXN0cmFsaWExFzAV
-BgNVBAsTDkdhdGVrZWVwZXIgUEtJMRswGQYDVQQDExJHYXRla2VlcGVyIFJvb3Qg
-Q0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9+vfGbs6hpJSjFgjq
-cFRkyFA2qQZW1IhFRXqwTdBQN50hCcyuGPKcK7XW/r7ohg8V2yY3lapJU7gRraew
-5MSj7CdnhtsVjXhPK3WxaHHTgk6XCliMeaco/mE8EdR6aPMI5Z6Em+jVcuyGXpLk
-S/hhSORmKr08GsGKj4yDYhEPXmYlCRki+nQgvP1qvIqnPhoX7oMU6pZb1fNxQvSq
-5Fus6uK+UdWV94SGcobI2iKx5znr5PpGinJtYqhJjhqNcy9jacjKhmrnKbAazGov
-kBE1U5U+t5GGnZ7LKZ4iDv2WyoSyFpFmIBHM3WDK3krrbfZ2qPmR4J9/D3Ewl4cz
-PXc1AgMBAAGjYDBeMA8GA1UdEwQIMAYBAf8CAQgwCwYDVR0PBAQDAgEGMB0GA1Ud
-DgQWBBSBt6XKJBoVNQud/bUsDXKwcrmLFTAfBgNVHSMEGDAWgBSBt6XKJBoVNQud
-/bUsDXKwcrmLFTANBgkqhkiG9w0BAQQFAAOCAQEAVK6qtADWKvGBkfy1i3GUvPqF
-U8ueJP7z8zovy/pcI34ly32jJkUnK42ttMHJU7WopSxuqT+jrSN39w4NV9Q5uhyZ
-kM5mI6r1wo9QcYpg5/iN+q4advJ6OrLmH3gWR/CbLy/zpw/DGg3g+G9Q+3/voA8X
-ZUDpejhy0VRxM18Uc7sb2+OUxSoWNKDCyChwUzxkk0Z1RQgkxffLttYRRfZvtKrn
-v/8rH8OGC8QG7b9k1V5FLomh/IqHNt5+8dRhL/aTpGmTDKk6QRsAe/vsk3hY81zr
-bmxWUP9g3+aH7EzEm5omuTmUzHlYgUsn+OrxkfspBAZjIQxTcEaTqMHvClzIwQ==
------END CERTIFICATE-----
-
 subject= /C=ES/O=FNMT/OU=FNMT Clase 2 CA
 serial=36F11B19
 -----BEGIN CERTIFICATE-----
diff --git a/lib/distutils/distutils/LICENSE b/lib/distutils/distutils/LICENSE
new file mode 100644
index 0000000..5cdb01e
--- /dev/null
+++ b/lib/distutils/distutils/LICENSE
@@ -0,0 +1,279 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC.  Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team.  In October of the same
+year, the PythonLabs team moved to Digital Creations (now Zope
+Corporation, see http://www.zope.com).  In 2001, the Python Software
+Foundation (PSF, see http://www.python.org/psf/) was formed, a
+non-profit organization created specifically to own Python-related
+Intellectual Property.  Zope Corporation is a sponsoring member of
+the PSF.
+
+All Python releases are Open Source (see http://www.opensource.org for
+the Open Source Definition).  Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+    Release         Derived     Year        Owner       GPL-
+                    from                                compatible? (1)
+
+    0.9.0 thru 1.2              1991-1995   CWI         yes
+    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
+    1.6             1.5.2       2000        CNRI        no
+    2.0             1.6         2000        BeOpen.com  no
+    1.6.1           1.6         2001        CNRI        yes (2)
+    2.1             2.0+1.6.1   2001        PSF         no
+    2.0.1           2.0+1.6.1   2001        PSF         yes
+    2.1.1           2.1+2.0.1   2001        PSF         yes
+    2.2             2.1.1       2001        PSF         yes
+    2.1.2           2.1.1       2002        PSF         yes
+    2.1.3           2.1.2       2002        PSF         yes
+    2.2.1           2.2         2002        PSF         yes
+    2.2.2           2.2.1       2002        PSF         yes
+    2.2.3           2.2.2       2003        PSF         yes
+    2.3             2.2.2       2002-2003   PSF         yes
+    2.3.1           2.3         2002-2003   PSF         yes
+    2.3.2           2.3.1       2002-2003   PSF         yes
+    2.3.3           2.3.2       2002-2003   PSF         yes
+    2.3.4           2.3.3       2004        PSF         yes
+    2.3.5           2.3.4       2005        PSF         yes
+    2.4             2.3         2004        PSF         yes
+    2.4.1           2.4         2005        PSF         yes
+    2.4.2           2.4.1       2005        PSF         yes
+    2.4.3           2.4.2       2006        PSF         yes
+    2.4.4           2.4.3       2006        PSF         yes
+    2.5             2.4         2006        PSF         yes
+    2.5.1           2.5         2007        PSF         yes
+    2.5.2           2.5.1       2008        PSF         yes
+    2.5.3           2.5.2       2008        PSF         yes
+    2.6             2.5         2008        PSF         yes
+    2.6.1           2.6         2008        PSF         yes
+    2.6.2           2.6.1       2009        PSF         yes
+    2.6.3           2.6.2       2009        PSF         yes
+    2.6.4           2.6.3       2009        PSF         yes
+    2.6.5           2.6.4       2010        PSF         yes
+    2.7             2.6         2010        PSF         yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+    the GPL.  All Python licenses, unlike the GPL, let you distribute
+    a modified version without making your changes open source.  The
+    GPL-compatible licenses make it possible to combine Python with
+    other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+    because its license has a choice of law clause.  According to
+    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+    is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+Python Software Foundation; All Rights Reserved" are retained in Python alone or
+in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions.  Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee.  This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party.  As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee.  Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement.  This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013.  This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee.  This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+        ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands.  All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/lib/distutils/distutils/README b/lib/distutils/distutils/README
new file mode 100644
index 0000000..408a203
--- /dev/null
+++ b/lib/distutils/distutils/README
@@ -0,0 +1,13 @@
+This directory contains the Distutils package.
+
+There's a full documentation available at:
+
+    http://docs.python.org/distutils/
+
+The Distutils-SIG web page is also a good starting point:
+
+    http://www.python.org/sigs/distutils-sig/
+
+WARNING : Distutils must remain compatible with 2.3
+
+$Id$
diff --git a/lib/distutils/distutils/__init__.py b/lib/distutils/distutils/__init__.py
new file mode 100644
index 0000000..a849f1a
--- /dev/null
+++ b/lib/distutils/distutils/__init__.py
@@ -0,0 +1,19 @@
+"""distutils
+
+The main package for the Python Module Distribution Utilities.  Normally
+used from a setup script as
+
+   from distutils.core import setup
+
+   setup (...)
+"""
+
+__revision__ = "$Id$"
+
+# Distutils version
+#
+# Updated automatically by the Python release process.
+#
+#--start constants--
+__version__ = "2.7.2"
+#--end constants--
diff --git a/lib/distutils/distutils/archive_util.py b/lib/distutils/distutils/archive_util.py
new file mode 100644
index 0000000..834b722
--- /dev/null
+++ b/lib/distutils/distutils/archive_util.py
@@ -0,0 +1,243 @@
+"""distutils.archive_util
+
+Utility functions for creating archive files (tarballs, zip files,
+that sort of thing)."""
+
+__revision__ = "$Id$"
+
+import os
+from warnings import warn
+import sys
+
+from distutils.errors import DistutilsExecError
+from distutils.spawn import spawn
+from distutils.dir_util import mkpath
+from distutils import log
+
+try:
+    from pwd import getpwnam
+except ImportError:
+    getpwnam = None
+
+try:
+    from grp import getgrnam
+except ImportError:
+    getgrnam = None
+
+def _get_gid(name):
+    """Returns a gid, given a group name."""
+    if getgrnam is None or name is None:
+        return None
+    try:
+        result = getgrnam(name)
+    except KeyError:
+        result = None
+    if result is not None:
+        return result[2]
+    return None
+
+def _get_uid(name):
+    """Returns an uid, given a user name."""
+    if getpwnam is None or name is None:
+        return None
+    try:
+        result = getpwnam(name)
+    except KeyError:
+        result = None
+    if result is not None:
+        return result[2]
+    return None
+
def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                 owner=None, group=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "compress", "bzip2", or None.
    (compress will be deprecated in Python 3.2)

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_dir' +  ".tar", possibly plus
    the appropriate compression extension (".gz", ".bz2" or ".Z").

    Raises ValueError for an unrecognized 'compress' value.
    Returns the output filename.
    """
    tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', None: '', 'compress': ''}
    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'compress': '.Z'}

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext.keys():
        # raise-as-call form works on every Python 2.x and 3.x, unlike the
        # old "raise E, value" statement this module used elsewhere.
        raise ValueError(
            "bad value for 'compress': must be None, 'gzip', 'bzip2' "
            "or 'compress'")

    # The external 'compress' tool appends its own ".Z" extension, so the
    # intermediate archive keeps the bare ".tar" name in that case.
    archive_name = base_name + '.tar'
    if compress != 'compress':
        archive_name += compress_ext.get(compress, '')

    mkpath(os.path.dirname(archive_name), dry_run=dry_run)

    # creating the tarball
    import tarfile  # late import so Python build itself doesn't break

    log.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        # tarfile 'filter' hook: overwrite ownership metadata on each
        # member when an explicit owner/group was requested.
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        # 'w|...' opens the tar in sequential (stream) write mode.
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    # compression using `compress`
    if compress == 'compress':
        warn("'compress' will be deprecated.", PendingDeprecationWarning)
        # the option varies depending on the platform
        compressed_name = archive_name + compress_ext[compress]
        if sys.platform == 'win32':
            cmd = [compress, archive_name, compressed_name]
        else:
            cmd = [compress, '-f', archive_name]
        spawn(cmd, dry_run=dry_run)
        return compressed_name

    return archive_name
+
def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip".  Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path).  If neither tool is
    available, raises DistutilsExecError.  Returns the name of the output zip
    file.
    """
    try:
        import zipfile
    except ImportError:
        zipfile = None

    zip_filename = base_name + ".zip"
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)

    # If zipfile module is not available, try spawning an external
    # 'zip' command.
    if zipfile is None:
        if verbose:
            zipoptions = "-r"
        else:
            zipoptions = "-rq"

        try:
            spawn(["zip", zipoptions, zip_filename, base_dir],
                  dry_run=dry_run)
        except DistutilsExecError:
            # XXX really should distinguish between "couldn't find
            # external 'zip' command" and "zip failed".
            # raise-as-call form works on every Python 2.x and 3.x.
            raise DistutilsExecError(
                ("unable to create zip file '%s': "
                 "could neither import the 'zipfile' module nor "
                 "find a standalone zip utility") % zip_filename)

    else:
        log.info("creating '%s' and adding '%s' to it",
                 zip_filename, base_dir)

        if not dry_run:
            zip = zipfile.ZipFile(zip_filename, "w",
                                  compression=zipfile.ZIP_DEFLATED)

            for dirpath, dirnames, filenames in os.walk(base_dir):
                for name in filenames:
                    path = os.path.normpath(os.path.join(dirpath, name))
                    if os.path.isfile(path):
                        zip.write(path, path)
                        # lazy %-args: formatting is skipped entirely when
                        # the logging level filters the message out
                        log.info("adding '%s'", path)
            zip.close()

    return zip_filename
+
# Registry of supported archive formats, keyed by format name.  Each value
# is a 3-tuple (builder, extra_kwargs, description): 'builder' is the
# function that creates the archive, 'extra_kwargs' is a list of
# (name, value) pairs passed to it by make_archive(), and 'description'
# is human-readable text describing the format.
ARCHIVE_FORMATS = {
    'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'ztar':  (make_tarball, [('compress', 'compress')], "compressed tar file"),
    'tar':   (make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip':   (make_zipfile, [],"ZIP file")
    }
+
def check_archive_formats(formats):
    """Return the first entry of *formats* that is not a known archive
    format, or None when every entry is registered in ARCHIVE_FORMATS.
    """
    return next(
        (fmt for fmt in formats if fmt not in ARCHIVE_FORMATS), None)
+
def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
                 dry_run=0, owner=None, group=None):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "ztar",
    or "gztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.

    Raises ValueError when 'format' is not a registered archive format.
    """
    save_cwd = os.getcwd()
    if root_dir is not None:
        log.debug("changing into '%s'", root_dir)
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = {'dry_run': dry_run}

    try:
        format_info = ARCHIVE_FORMATS[format]
    except KeyError:
        # raise-as-call form works on every Python 2.x and 3.x, unlike the
        # old "raise E, value" statement used here previously.
        raise ValueError("unknown archive format '%s'" % format)

    func = format_info[0]
    for arg, val in format_info[1]:
        kwargs[arg] = val

    # only the tar-based builders understand ownership options
    if format != 'zip':
        kwargs['owner'] = owner
        kwargs['group'] = group

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        # always restore the working directory, even if the builder raised
        if root_dir is not None:
            log.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename
diff --git a/lib/distutils/distutils/bcppcompiler.py b/lib/distutils/distutils/bcppcompiler.py
new file mode 100644
index 0000000..f26e7ae
--- /dev/null
+++ b/lib/distutils/distutils/bcppcompiler.py
@@ -0,0 +1,394 @@
+"""distutils.bcppcompiler
+
+Contains BorlandCCompiler, an implementation of the abstract CCompiler class
+for the Borland C++ compiler.
+"""
+
+# This implementation by Lyle Johnson, based on the original msvccompiler.py
+# module and using the directions originally published by Gordon Williams.
+
+# XXX looks like there's a LOT of overlap between these two classes:
+# someone should sit down and factor out the common code as
+# WindowsCCompiler!  --GPW
+
+__revision__ = "$Id$"
+
+import os
+
+from distutils.errors import (DistutilsExecError, CompileError, LibError,
+                              LinkError, UnknownFileError)
+from distutils.ccompiler import CCompiler, gen_preprocess_options
+from distutils.file_util import write_file
+from distutils.dep_util import newer
+from distutils import log
+
class BCPPCompiler(CCompiler) :
    """Concrete class that implements an interface to the Borland C/C++
    compiler, as defined by the CCompiler abstract class.
    """

    compiler_type = 'bcpp'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = _c_extensions + _cpp_extensions
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'


    def __init__ (self,
                  verbose=0,
                  dry_run=0,
                  force=0):
        """Record the hard-coded Borland tool names and the canned option
        sets used by compile() and link() below."""

        CCompiler.__init__ (self, verbose, dry_run, force)

        # These executables are assumed to all be in the path.
        # Borland doesn't seem to use any special registry settings to
        # indicate their installation locations.

        self.cc = "bcc32.exe"
        self.linker = "ilink32.exe"
        self.lib = "tlib.exe"

        self.preprocess_options = None
        self.compile_options = ['/tWM', '/O2', '/q', '/g0']
        self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']

        self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
        self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
        self.ldflags_static = []
        self.ldflags_exe = ['/Gn', '/q', '/x']
        self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r']


    # -- Worker methods ------------------------------------------------

    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):
        """Compile 'sources'; return the list of object filenames.

        .res files are passed through untouched, .rc resources are
        compiled with brcc32, and everything else goes to bcc32.
        Raises CompileError when a tool invocation fails.
        """

        macros, objects, extra_postargs, pp_opts, build = \
                self._setup_compile(output_dir, macros, include_dirs, sources,
                                    depends, extra_postargs)
        compile_opts = extra_preargs or []
        compile_opts.append ('-c')
        if debug:
            compile_opts.extend (self.compile_options_debug)
        else:
            compile_opts.extend (self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                continue
            # XXX why do the normpath here?
            src = os.path.normpath(src)
            obj = os.path.normpath(obj)
            # XXX _setup_compile() did a mkpath() too but before the normpath.
            # Is it possible to skip the normpath?
            self.mkpath(os.path.dirname(obj))

            if ext == '.res':
                # This is already a binary file -- skip it.
                continue # the 'for' loop
            if ext == '.rc':
                # This needs to be compiled to a .res file -- do it now.
                try:
                    self.spawn (["brcc32", "-fo", obj, src])
                except DistutilsExecError, msg:
                    raise CompileError, msg
                continue # the 'for' loop

            # The next two are both for the real compiler.
            if ext in self._c_extensions:
                input_opt = ""
            elif ext in self._cpp_extensions:
                input_opt = "-P"
            else:
                # Unknown file type -- no extra options.  The compiler
                # will probably fail, but let it just in case this is a
                # file the compiler recognizes even if we don't.
                input_opt = ""

            output_opt = "-o" + obj

            # Compiler command line syntax is: "bcc32 [options] file(s)".
            # Note that the source file names must appear at the end of
            # the command line.
            try:
                self.spawn ([self.cc] + compile_opts + pp_opts +
                            [input_opt, output_opt] +
                            extra_postargs + [src])
            except DistutilsExecError, msg:
                raise CompileError, msg

        return objects

    # compile ()


    def create_static_lib (self,
                           objects,
                           output_libname,
                           output_dir=None,
                           debug=0,
                           target_lang=None):
        """Bundle 'objects' into a static library using tlib.exe.

        Skips the step when the library is already up to date.  Raises
        LibError when tlib fails.
        """

        (objects, output_dir) = self._fix_object_args (objects, output_dir)
        output_filename = \
            self.library_filename (output_libname, output_dir=output_dir)

        if self._need_link (objects, output_filename):
            lib_args = [output_filename, '/u'] + objects
            if debug:
                pass                    # XXX what goes here?
            try:
                self.spawn ([self.lib] + lib_args)
            except DistutilsExecError, msg:
                raise LibError, msg
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # create_static_lib ()


    def link (self,
              target_desc,
              objects,
              output_filename,
              output_dir=None,
              libraries=None,
              library_dirs=None,
              runtime_library_dirs=None,
              export_symbols=None,
              debug=0,
              extra_preargs=None,
              extra_postargs=None,
              build_temp=None,
              target_lang=None):
        """Link objects into an executable or DLL with ilink32.exe.

        Assembles the Borland-specific comma-delimited argument list and,
        when 'export_symbols' is given, writes a temporary .def file next
        to the first object file.  Raises LinkError on failure.
        """

        # XXX this ignores 'build_temp'!  should follow the lead of
        # msvccompiler.py

        (objects, output_dir) = self._fix_object_args (objects, output_dir)
        (libraries, library_dirs, runtime_library_dirs) = \
            self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)

        if runtime_library_dirs:
            log.warn("I don't know what to do with 'runtime_library_dirs': %s",
                     str(runtime_library_dirs))

        if output_dir is not None:
            output_filename = os.path.join (output_dir, output_filename)

        if self._need_link (objects, output_filename):

            # Figure out linker args based on type of target.
            if target_desc == CCompiler.EXECUTABLE:
                startup_obj = 'c0w32'
                if debug:
                    ld_args = self.ldflags_exe_debug[:]
                else:
                    ld_args = self.ldflags_exe[:]
            else:
                startup_obj = 'c0d32'
                if debug:
                    ld_args = self.ldflags_shared_debug[:]
                else:
                    ld_args = self.ldflags_shared[:]


            # Create a temporary exports file for use by the linker
            if export_symbols is None:
                def_file = ''
            else:
                head, tail = os.path.split (output_filename)
                modname, ext = os.path.splitext (tail)
                temp_dir = os.path.dirname(objects[0]) # preserve tree structure
                def_file = os.path.join (temp_dir, '%s.def' % modname)
                contents = ['EXPORTS']
                for sym in (export_symbols or []):
                    contents.append('  %s=_%s' % (sym, sym))
                self.execute(write_file, (def_file, contents),
                             "writing %s" % def_file)

            # Borland C++ has problems with '/' in paths
            objects2 = map(os.path.normpath, objects)
            # split objects in .obj and .res files
            # Borland C++ needs them at different positions in the command line
            objects = [startup_obj]
            resources = []
            for file in objects2:
                (base, ext) = os.path.splitext(os.path.normcase(file))
                if ext == '.res':
                    resources.append(file)
                else:
                    objects.append(file)


            for l in library_dirs:
                ld_args.append("/L%s" % os.path.normpath(l))
            ld_args.append("/L.") # we sometimes use relative paths

            # list of object files
            ld_args.extend(objects)

            # XXX the command-line syntax for Borland C++ is a bit wonky;
            # certain filenames are jammed together in one big string, but
            # comma-delimited.  This doesn't mesh too well with the
            # Unix-centric attitude (with a DOS/Windows quoting hack) of
            # 'spawn()', so constructing the argument list is a bit
            # awkward.  Note that doing the obvious thing and jamming all
            # the filenames and commas into one argument would be wrong,
            # because 'spawn()' would quote any filenames with spaces in
            # them.  Arghghh!.  Apparently it works fine as coded...

            # name of dll/exe file
            ld_args.extend([',',output_filename])
            # no map file and start libraries
            ld_args.append(',,')

            for lib in libraries:
                # see if we find it and if there is a bcpp specific lib
                # (xxx_bcpp.lib)
                libfile = self.find_library_file(library_dirs, lib, debug)
                if libfile is None:
                    ld_args.append(lib)
                    # probably a BCPP internal library -- don't warn
                else:
                    # full name which prefers bcpp_xxx.lib over xxx.lib
                    ld_args.append(libfile)

            # some default libraries
            ld_args.append ('import32')
            ld_args.append ('cw32mt')

            # def file for export symbols
            ld_args.extend([',',def_file])
            # add resource files
            ld_args.append(',')
            ld_args.extend(resources)


            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath (os.path.dirname (output_filename))
            try:
                self.spawn ([self.linker] + ld_args)
            except DistutilsExecError, msg:
                raise LinkError, msg

        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # link ()

    # -- Miscellaneous methods -----------------------------------------


    def find_library_file (self, dirs, lib, debug=0):
        """Return the path of the best-matching library file found in
        'dirs', or None when no candidate exists."""
        # List of effective library names to try, in order of preference:
        # xxx_bcpp.lib is better than xxx.lib
        # and xxx_d.lib is better than xxx.lib if debug is set
        #
        # The "_bcpp" suffix is to handle a Python installation for people
        # with multiple compilers (primarily Distutils hackers, I suspect
        # ;-).  The idea is they'd have one static library for each
        # compiler they care about, since (almost?) every Windows compiler
        # seems to have a different format for static libraries.
        if debug:
            dlib = (lib + "_d")
            try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
        else:
            try_names = (lib + "_bcpp", lib)

        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.exists(libfile):
                    return libfile
        # NOTE(review): this 'else' belongs to the outer 'for' loop; since
        # the loop never breaks, it always runs when nothing was returned,
        # so it is equivalent to a plain 'return None' after the loop.
        else:
            # Oops, didn't find it in *any* of 'dirs'
            return None

    # overwrite the one from CCompiler to support rc and res-files
    def object_filenames (self,
                          source_filenames,
                          strip_dir=0,
                          output_dir=''):
        """Map source filenames to object filenames, passing .res files
        through unchanged and mapping .rc resources to .res.  Raises
        UnknownFileError for unrecognized extensions."""
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            (base, ext) = os.path.splitext (os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc','.res']):
                raise UnknownFileError, \
                      "unknown file type '%s' (from '%s')" % \
                      (ext, src_name)
            if strip_dir:
                base = os.path.basename (base)
            if ext == '.res':
                # these can go unchanged
                obj_names.append (os.path.join (output_dir, base + ext))
            elif ext == '.rc':
                # these need to be compiled to .res-files
                obj_names.append (os.path.join (output_dir, base + '.res'))
            else:
                obj_names.append (os.path.join (output_dir,
                                            base + self.obj_extension))
        return obj_names

    # object_filenames ()

    def preprocess (self,
                    source,
                    output_file=None,
                    macros=None,
                    include_dirs=None,
                    extra_preargs=None,
                    extra_postargs=None):
        """Run the cpp32.exe preprocessor over 'source'.

        Only runs when forced, when there is no output file, or when the
        source is newer than the output.  Raises CompileError on failure.
        """

        (_, macros, include_dirs) = \
            self._fix_compile_args(None, macros, include_dirs)
        pp_opts = gen_preprocess_options(macros, include_dirs)
        pp_args = ['cpp32.exe'] + pp_opts
        if output_file is not None:
            pp_args.append('-o' + output_file)
        if extra_preargs:
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)

        # We need to preprocess: either we're being forced to, or the
        # source file is newer than the target (or the target doesn't
        # exist).
        if self.force or output_file is None or newer(source, output_file):
            if output_file:
                self.mkpath(os.path.dirname(output_file))
            try:
                self.spawn(pp_args)
            except DistutilsExecError, msg:
                print msg
                raise CompileError, msg

    # preprocess()
diff --git a/lib/distutils/distutils/ccompiler.py b/lib/distutils/distutils/ccompiler.py
new file mode 100644
index 0000000..c2b1f6f
--- /dev/null
+++ b/lib/distutils/distutils/ccompiler.py
@@ -0,0 +1,1145 @@
+"""distutils.ccompiler
+
+Contains CCompiler, an abstract base class that defines the interface
+for the Distutils compiler abstraction model."""
+
+__revision__ = "$Id$"
+
+import sys
+import os
+import re
+
+from distutils.errors import (CompileError, LinkError, UnknownFileError,
+                              DistutilsPlatformError, DistutilsModuleError)
+from distutils.spawn import spawn
+from distutils.file_util import move_file
+from distutils.dir_util import mkpath
+from distutils.dep_util import newer_group
+from distutils.util import split_quoted, execute
+from distutils import log
+
_sysconfig = __import__('sysconfig')

def customize_compiler(compiler):
    """Do any platform-specific customization of a CCompiler instance.

    On "unix"-type compilers the commands come from Python's build-time
    Makefile (via sysconfig) and may be overridden through the usual
    environment variables (CC, CXX, CFLAGS, LDFLAGS, ...).  Other
    compiler types are left untouched.
    """
    if compiler.compiler_type != "unix":
        return

    (cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags) = \
        _sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
                                   'CCSHARED', 'LDSHARED', 'SO', 'AR',
                                   'ARFLAGS')

    environ = os.environ
    cc = environ.get('CC', cc)
    cxx = environ.get('CXX', cxx)
    ldshared = environ.get('LDSHARED', ldshared)
    # Preprocessor defaults to "<cc> -E" -- not universally true, but the
    # historical distutils behaviour.
    cpp = environ.get('CPP', cc + " -E")
    if 'LDFLAGS' in environ:
        ldshared = ldshared + ' ' + environ['LDFLAGS']
    if 'CFLAGS' in environ:
        # a user CFLAGS replaces the Makefile CFLAGS (appended to OPT)
        # and is also folded into the shared-library link line
        cflags = opt + ' ' + environ['CFLAGS']
        ldshared = ldshared + ' ' + environ['CFLAGS']
    if 'CPPFLAGS' in environ:
        cpp = cpp + ' ' + environ['CPPFLAGS']
        cflags = cflags + ' ' + environ['CPPFLAGS']
        ldshared = ldshared + ' ' + environ['CPPFLAGS']
    ar = environ.get('AR', ar)
    archiver = ar + ' ' + environ.get('ARFLAGS', ar_flags)

    cc_cmd = cc + ' ' + cflags
    compiler.set_executables(
        preprocessor=cpp,
        compiler=cc_cmd,
        compiler_so=cc_cmd + ' ' + ccshared,
        compiler_cxx=cxx,
        linker_so=ldshared,
        linker_exe=cc,
        archiver=archiver)

    compiler.shared_lib_extension = so_ext
+
+class CCompiler:
+    """Abstract base class to define the interface that must be implemented
+    by real compiler classes.  Also has some utility methods used by
+    several compiler classes.
+
+    The basic idea behind a compiler abstraction class is that each
+    instance can be used for all the compile/link steps in building a
+    single project.  Thus, attributes common to all of those compile and
+    link steps -- include directories, macros to define, libraries to link
+    against, etc. -- are attributes of the compiler instance.  To allow for
+    variability in how individual files are treated, most of those
+    attributes may be varied on a per-compilation or per-link basis.
+    """
+
+    # 'compiler_type' is a class attribute that identifies this class.  It
+    # keeps code that wants to know what kind of compiler it's dealing with
+    # from having to import all possible compiler classes just to do an
+    # 'isinstance'.  In concrete CCompiler subclasses, 'compiler_type'
+    # should really, really be one of the keys of the 'compiler_class'
+    # dictionary (see below -- used by the 'new_compiler()' factory
+    # function) -- authors of new compiler interface classes are
+    # responsible for updating 'compiler_class'!
+    compiler_type = None
+
+    # XXX things not handled by this compiler abstraction model:
+    #   * client can't provide additional options for a compiler,
+    #     e.g. warning, optimization, debugging flags.  Perhaps this
+    #     should be the domain of concrete compiler abstraction classes
+    #     (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base
+    #     class should have methods for the common ones.
+    #   * can't completely override the include or library searchg
+    #     path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2".
+    #     I'm not sure how widely supported this is even by Unix
+    #     compilers, much less on other platforms.  And I'm even less
+    #     sure how useful it is; maybe for cross-compiling, but
+    #     support for that is a ways off.  (And anyways, cross
+    #     compilers probably have a dedicated binary with the
+    #     right paths compiled in.  I hope.)
+    #   * can't do really freaky things with the library list/library
+    #     dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against
+    #     different versions of libfoo.a in different locations.  I
+    #     think this is useless without the ability to null out the
+    #     library search path anyways.
+
+
+    # Subclasses that rely on the standard filename generation methods
+    # implemented below should override these; see the comment near
+    # those methods ('object_filenames()' et. al.) for details:
+    src_extensions = None               # list of strings
+    obj_extension = None                # string
+    static_lib_extension = None
+    shared_lib_extension = None         # string
+    static_lib_format = None            # format string
+    shared_lib_format = None            # prob. same as static_lib_format
+    exe_extension = None                # string
+
+    # Default language settings. language_map is used to detect a source
+    # file or Extension target language, checking source filenames.
+    # language_order is used to detect the language precedence, when deciding
+    # what language to use when mixing source types. For example, if some
+    # extension has two files with ".c" extension, and one with ".cpp", it
+    # is still linked as c++.
+    language_map = {".c"   : "c",
+                    ".cc"  : "c++",
+                    ".cpp" : "c++",
+                    ".cxx" : "c++",
+                    ".m"   : "objc",
+                   }
+    language_order = ["c++", "objc", "c"]
+
+    def __init__ (self, verbose=0, dry_run=0, force=0):
+        self.dry_run = dry_run
+        self.force = force
+        self.verbose = verbose
+
+        # 'output_dir': a common output directory for object, library,
+        # shared object, and shared library files
+        self.output_dir = None
+
+        # 'macros': a list of macro definitions (or undefinitions).  A
+        # macro definition is a 2-tuple (name, value), where the value is
+        # either a string or None (no explicit value).  A macro
+        # undefinition is a 1-tuple (name,).
+        self.macros = []
+
+        # 'include_dirs': a list of directories to search for include files
+        self.include_dirs = []
+
+        # 'libraries': a list of libraries to include in any link
+        # (library names, not filenames: eg. "foo" not "libfoo.a")
+        self.libraries = []
+
+        # 'library_dirs': a list of directories to search for libraries
+        self.library_dirs = []
+
+        # 'runtime_library_dirs': a list of directories to search for
+        # shared libraries/objects at runtime
+        self.runtime_library_dirs = []
+
+        # 'objects': a list of object files (or similar, such as explicitly
+        # named library files) to include on any link
+        self.objects = []
+
+        for key in self.executables.keys():
+            self.set_executable(key, self.executables[key])
+
+    def set_executables(self, **args):
+        """Define the executables (and options for them) that will be run
+        to perform the various stages of compilation.  The exact set of
+        executables that may be specified here depends on the compiler
+        class (via the 'executables' class attribute), but most will have:
+          compiler      the C/C++ compiler
+          linker_so     linker used to create shared objects and libraries
+          linker_exe    linker used to create binary executables
+          archiver      static library creator
+
+        On platforms with a command-line (Unix, DOS/Windows), each of these
+        is a string that will be split into executable name and (optional)
+        list of arguments.  (Splitting the string is done similarly to how
+        Unix shells operate: words are delimited by spaces, but quotes and
+        backslashes can override this.  See
+        'distutils.util.split_quoted()'.)
+        """
+
+        # Note that some CCompiler implementation classes will define class
+        # attributes 'cpp', 'cc', etc. with hard-coded executable names;
+        # this is appropriate when a compiler class is for exactly one
+        # compiler/OS combination (eg. MSVCCompiler).  Other compiler
+        # classes (UnixCCompiler, in particular) are driven by information
+        # discovered at run-time, since there are many different ways to do
+        # basically the same things with Unix C compilers.
+
+        for key in args.keys():
+            if key not in self.executables:
+                raise ValueError, \
+                      "unknown executable '%s' for class %s" % \
+                      (key, self.__class__.__name__)
+            self.set_executable(key, args[key])
+
+    def set_executable(self, key, value):
+        if isinstance(value, str):
+            setattr(self, key, split_quoted(value))
+        else:
+            setattr(self, key, value)
+
+    def _find_macro(self, name):
+        i = 0
+        for defn in self.macros:
+            if defn[0] == name:
+                return i
+            i = i + 1
+        return None
+
+    def _check_macro_definitions(self, definitions):
+        """Ensures that every element of 'definitions' is a valid macro
+        definition, ie. either (name,value) 2-tuple or a (name,) tuple.  Do
+        nothing if all definitions are OK, raise TypeError otherwise.
+        """
+        for defn in definitions:
+            if not (isinstance(defn, tuple) and
+                    (len (defn) == 1 or
+                     (len (defn) == 2 and
+                      (isinstance(defn[1], str) or defn[1] is None))) and
+                    isinstance(defn[0], str)):
+                raise TypeError, \
+                      ("invalid macro definition '%s': " % defn) + \
+                      "must be tuple (string,), (string, string), or " + \
+                      "(string, None)"
+
+
+    # -- Bookkeeping methods -------------------------------------------
+
+    def define_macro(self, name, value=None):
+        """Define a preprocessor macro for all compilations driven by this
+        compiler object.  The optional parameter 'value' should be a
+        string; if it is not supplied, then the macro will be defined
+        without an explicit value and the exact outcome depends on the
+        compiler used (XXX true? does ANSI say anything about this?)
+        """
+        # Delete from the list of macro definitions/undefinitions if
+        # already there (so that this one will take precedence).
+        i = self._find_macro (name)
+        if i is not None:
+            del self.macros[i]
+
+        defn = (name, value)
+        self.macros.append (defn)
+
+    def undefine_macro(self, name):
+        """Undefine a preprocessor macro for all compilations driven by
+        this compiler object.  If the same macro is defined by
+        'define_macro()' and undefined by 'undefine_macro()' the last call
+        takes precedence (including multiple redefinitions or
+        undefinitions).  If the macro is redefined/undefined on a
+        per-compilation basis (ie. in the call to 'compile()'), then that
+        takes precedence.
+        """
+        # Delete from the list of macro definitions/undefinitions if
+        # already there (so that this one will take precedence).
+        i = self._find_macro (name)
+        if i is not None:
+            del self.macros[i]
+
+        undefn = (name,)
+        self.macros.append (undefn)
+
+    def add_include_dir(self, dir):
+        """Add 'dir' to the list of directories that will be searched for
+        header files.  The compiler is instructed to search directories in
+        the order in which they are supplied by successive calls to
+        'add_include_dir()'.
+        """
+        self.include_dirs.append (dir)
+
+    def set_include_dirs(self, dirs):
+        """Set the list of directories that will be searched to 'dirs' (a
+        list of strings).  Overrides any preceding calls to
+        'add_include_dir()'; subsequence calls to 'add_include_dir()' add
+        to the list passed to 'set_include_dirs()'.  This does not affect
+        any list of standard include directories that the compiler may
+        search by default.
+        """
+        self.include_dirs = dirs[:]
+
+    def add_library(self, libname):
+        """Add 'libname' to the list of libraries that will be included in
+        all links driven by this compiler object.  Note that 'libname'
+        should *not* be the name of a file containing a library, but the
+        name of the library itself: the actual filename will be inferred by
+        the linker, the compiler, or the compiler class (depending on the
+        platform).
+
+        The linker will be instructed to link against libraries in the
+        order they were supplied to 'add_library()' and/or
+        'set_libraries()'.  It is perfectly valid to duplicate library
+        names; the linker will be instructed to link against libraries as
+        many times as they are mentioned.
+        """
+        self.libraries.append (libname)
+
+    def set_libraries(self, libnames):
+        """Set the list of libraries to be included in all links driven by
+        this compiler object to 'libnames' (a list of strings).  This does
+        not affect any standard system libraries that the linker may
+        include by default.
+        """
+        self.libraries = libnames[:]
+
+
+    def add_library_dir(self, dir):
+        """Add 'dir' to the list of directories that will be searched for
+        libraries specified to 'add_library()' and 'set_libraries()'.  The
+        linker will be instructed to search for libraries in the order they
+        are supplied to 'add_library_dir()' and/or 'set_library_dirs()'.
+        """
+        self.library_dirs.append(dir)
+
+    def set_library_dirs(self, dirs):
+        """Set the list of library search directories to 'dirs' (a list of
+        strings).  This does not affect any standard library search path
+        that the linker may search by default.
+        """
+        self.library_dirs = dirs[:]
+
+    def add_runtime_library_dir(self, dir):
+        """Add 'dir' to the list of directories that will be searched for
+        shared libraries at runtime.
+        """
+        self.runtime_library_dirs.append(dir)
+
+    def set_runtime_library_dirs(self, dirs):
+        """Set the list of directories to search for shared libraries at
+        runtime to 'dirs' (a list of strings).  This does not affect any
+        standard search path that the runtime linker may search by
+        default.
+        """
+        self.runtime_library_dirs = dirs[:]
+
+    def add_link_object(self, object):
+        """Add 'object' to the list of object files (or analogues, such as
+        explicitly named library files or the output of "resource
+        compilers") to be included in every link driven by this compiler
+        object.
+        """
+        self.objects.append(object)
+
+    def set_link_objects(self, objects):
+        """Set the list of object files (or analogues) to be included in
+        every link to 'objects'.  This does not affect any standard object
+        files that the linker may include by default (such as system
+        libraries).
+        """
+        self.objects = objects[:]
+
+
+    # -- Private utility methods --------------------------------------
+    # (here for the convenience of subclasses)
+
+    # Helper method to prep compiler in subclass compile() methods
+
+    def _setup_compile(self, outdir, macros, incdirs, sources, depends,
+                       extra):
+        """Process arguments and decide which source files to compile."""
+        if outdir is None:
+            outdir = self.output_dir
+        elif not isinstance(outdir, str):
+            raise TypeError, "'output_dir' must be a string or None"
+
+        if macros is None:
+            macros = self.macros
+        elif isinstance(macros, list):
+            macros = macros + (self.macros or [])
+        else:
+            raise TypeError, "'macros' (if supplied) must be a list of tuples"
+
+        if incdirs is None:
+            incdirs = self.include_dirs
+        elif isinstance(incdirs, (list, tuple)):
+            incdirs = list(incdirs) + (self.include_dirs or [])
+        else:
+            raise TypeError, \
+                  "'include_dirs' (if supplied) must be a list of strings"
+
+        if extra is None:
+            extra = []
+
+        # Get the list of expected output (object) files
+        objects = self.object_filenames(sources,
+                                        strip_dir=0,
+                                        output_dir=outdir)
+        assert len(objects) == len(sources)
+
+        pp_opts = gen_preprocess_options(macros, incdirs)
+
+        build = {}
+        for i in range(len(sources)):
+            src = sources[i]
+            obj = objects[i]
+            ext = os.path.splitext(src)[1]
+            self.mkpath(os.path.dirname(obj))
+            build[obj] = (src, ext)
+
+        return macros, objects, extra, pp_opts, build
+
+    def _get_cc_args(self, pp_opts, debug, before):
+        # works for unixccompiler, emxccompiler, cygwinccompiler
+        cc_args = pp_opts + ['-c']
+        if debug:
+            cc_args[:0] = ['-g']
+        if before:
+            cc_args[:0] = before
+        return cc_args
+
+    def _fix_compile_args(self, output_dir, macros, include_dirs):
+        """Typecheck and fix-up some of the arguments to the 'compile()'
+        method, and return fixed-up values.  Specifically: if 'output_dir'
+        is None, replaces it with 'self.output_dir'; ensures that 'macros'
+        is a list, and augments it with 'self.macros'; ensures that
+        'include_dirs' is a list, and augments it with 'self.include_dirs'.
+        Guarantees that the returned values are of the correct type,
+        i.e. for 'output_dir' either string or None, and for 'macros' and
+        'include_dirs' either list or None.
+        """
+        if output_dir is None:
+            output_dir = self.output_dir
+        elif not isinstance(output_dir, str):
+            raise TypeError, "'output_dir' must be a string or None"
+
+        if macros is None:
+            macros = self.macros
+        elif isinstance(macros, list):
+            macros = macros + (self.macros or [])
+        else:
+            raise TypeError, "'macros' (if supplied) must be a list of tuples"
+
+        if include_dirs is None:
+            include_dirs = self.include_dirs
+        elif isinstance(include_dirs, (list, tuple)):
+            include_dirs = list (include_dirs) + (self.include_dirs or [])
+        else:
+            raise TypeError, \
+                  "'include_dirs' (if supplied) must be a list of strings"
+
+        return output_dir, macros, include_dirs
+
+    def _fix_object_args(self, objects, output_dir):
+        """Typecheck and fix up some arguments supplied to various methods.
+        Specifically: ensure that 'objects' is a list; if output_dir is
+        None, replace with self.output_dir.  Return fixed versions of
+        'objects' and 'output_dir'.
+        """
+        if not isinstance(objects, (list, tuple)):
+            raise TypeError, \
+                  "'objects' must be a list or tuple of strings"
+        objects = list (objects)
+
+        if output_dir is None:
+            output_dir = self.output_dir
+        elif not isinstance(output_dir, str):
+            raise TypeError, "'output_dir' must be a string or None"
+
+        return (objects, output_dir)
+
+    def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+        """Typecheck and fix up some of the arguments supplied to the
+        'link_*' methods.  Specifically: ensure that all arguments are
+        lists, and augment them with their permanent versions
+        (eg. 'self.libraries' augments 'libraries').  Return a tuple with
+        fixed versions of all arguments.
+        """
+        if libraries is None:
+            libraries = self.libraries
+        elif isinstance(libraries, (list, tuple)):
+            libraries = list (libraries) + (self.libraries or [])
+        else:
+            raise TypeError, \
+                  "'libraries' (if supplied) must be a list of strings"
+
+        if library_dirs is None:
+            library_dirs = self.library_dirs
+        elif isinstance(library_dirs, (list, tuple)):
+            library_dirs = list (library_dirs) + (self.library_dirs or [])
+        else:
+            raise TypeError, \
+                  "'library_dirs' (if supplied) must be a list of strings"
+
+        if runtime_library_dirs is None:
+            runtime_library_dirs = self.runtime_library_dirs
+        elif isinstance(runtime_library_dirs, (list, tuple)):
+            runtime_library_dirs = (list (runtime_library_dirs) +
+                                    (self.runtime_library_dirs or []))
+        else:
+            raise TypeError, \
+                  "'runtime_library_dirs' (if supplied) " + \
+                  "must be a list of strings"
+
+        return (libraries, library_dirs, runtime_library_dirs)
+
+    def _need_link(self, objects, output_file):
+        """Return true if we need to relink the files listed in 'objects'
+        to recreate 'output_file'.
+        """
+        if self.force:
+            return 1
+        else:
+            if self.dry_run:
+                newer = newer_group (objects, output_file, missing='newer')
+            else:
+                newer = newer_group (objects, output_file)
+            return newer
+
+    def detect_language(self, sources):
+        """Detect the language of a given file, or list of files. Uses
+        language_map, and language_order to do the job.
+        """
+        if not isinstance(sources, list):
+            sources = [sources]
+        lang = None
+        index = len(self.language_order)
+        for source in sources:
+            base, ext = os.path.splitext(source)
+            extlang = self.language_map.get(ext)
+            try:
+                extindex = self.language_order.index(extlang)
+                if extindex < index:
+                    lang = extlang
+                    index = extindex
+            except ValueError:
+                pass
+        return lang
+
+    # -- Worker methods ------------------------------------------------
+    # (must be implemented by subclasses)
+
    def preprocess(self, source, output_file=None, macros=None,
                   include_dirs=None, extra_preargs=None, extra_postargs=None):
        """Preprocess a single C/C++ source file, named in 'source'.
        Output will be written to file named 'output_file', or stdout if
        'output_file' not supplied.  'macros' is a list of macro
        definitions as for 'compile()', which will augment the macros set
        with 'define_macro()' and 'undefine_macro()'.  'include_dirs' is a
        list of directory names that will be added to the default list.

        Raises PreprocessError on failure.
        """
        # Default implementation is a no-op; compiler classes that support
        # preprocessing override this.
        pass
+
    def compile(self, sources, output_dir=None, macros=None,
                include_dirs=None, debug=0, extra_preargs=None,
                extra_postargs=None, depends=None):
        """Compile one or more source files.

        'sources' must be a list of filenames, most likely C/C++
        files, but in reality anything that can be handled by a
        particular compiler and compiler class (eg. MSVCCompiler can
        handle resource files in 'sources').  Return a list of object
        filenames, one per source filename in 'sources'.  Depending on
        the implementation, not all source files will necessarily be
        compiled, but all corresponding object filenames will be
        returned.

        If 'output_dir' is given, object files will be put under it, while
        retaining their original path component.  That is, "foo/bar.c"
        normally compiles to "foo/bar.o" (for a Unix implementation); if
        'output_dir' is "build", then it would compile to
        "build/foo/bar.o".

        'macros', if given, must be a list of macro definitions.  A macro
        definition is either a (name, value) 2-tuple or a (name,) 1-tuple.
        The former defines a macro; if the value is None, the macro is
        defined without an explicit value.  The 1-tuple case undefines a
        macro.  Later definitions/redefinitions/ undefinitions take
        precedence.

        'include_dirs', if given, must be a list of strings, the
        directories to add to the default include file search path for this
        compilation only.

        'debug' is a boolean; if true, the compiler will be instructed to
        output debug symbols in (or alongside) the object file(s).

        'extra_preargs' and 'extra_postargs' are implementation- dependent.
        On platforms that have the notion of a command-line (e.g. Unix,
        DOS/Windows), they are most likely lists of strings: extra
        command-line arguments to prepend/append to the compiler command
        line.  On other platforms, consult the implementation class
        documentation.  In any event, they are intended as an escape hatch
        for those occasions when the abstract compiler framework doesn't
        cut the mustard.

        'depends', if given, is a list of filenames that all targets
        depend on.  If a source file is older than any file in
        depends, then the source file will be recompiled.  This
        supports dependency tracking, but only at a coarse
        granularity.

        Raises CompileError on failure.
        """
        # A concrete compiler class can either override this method
        # entirely or implement _compile().

        macros, objects, extra_postargs, pp_opts, build = \
                self._setup_compile(output_dir, macros, include_dirs, sources,
                                    depends, extra_postargs)
        cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                # Not scheduled for (re)compilation by _setup_compile();
                # the object filename is still returned below.
                continue
            self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)

        # Return *all* object filenames, not just the ones we just built.
        return objects
+
    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile 'src' to produce 'obj'."""

        # A concrete compiler class that does not override compile()
        # should implement _compile().
        pass
+
    def create_static_lib(self, objects, output_libname, output_dir=None,
                          debug=0, target_lang=None):
        """Link a bunch of stuff together to create a static library file.
        The "bunch of stuff" consists of the list of object files supplied
        as 'objects', the extra object files supplied to
        'add_link_object()' and/or 'set_link_objects()', the libraries
        supplied to 'add_library()' and/or 'set_libraries()', and the
        libraries supplied as 'libraries' (if any).

        'output_libname' should be a library name, not a filename; the
        filename will be inferred from the library name.  'output_dir' is
        the directory where the library file will be put.

        'debug' is a boolean; if true, debugging information will be
        included in the library (note that on most platforms, it is the
        compile step where this matters: the 'debug' flag is included here
        just for consistency).

        'target_lang' is the target language for which the given objects
        are being compiled. This allows specific linkage time treatment of
        certain languages.

        Raises LibError on failure.
        """
        # Default implementation is a no-op; compiler classes that can
        # build static libraries override this.
        pass
+
    # values for target_desc parameter in link(); they name the kind of
    # output being produced (see the link_*() convenience wrappers below)
    SHARED_OBJECT = "shared_object"
    SHARED_LIBRARY = "shared_library"
    EXECUTABLE = "executable"
+
    def link(self, target_desc, objects, output_filename, output_dir=None,
             libraries=None, library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=0, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link a bunch of stuff together to create an executable or
        shared library file.

        The "bunch of stuff" consists of the list of object files supplied
        as 'objects'.  'output_filename' should be a filename.  If
        'output_dir' is supplied, 'output_filename' is relative to it
        (i.e. 'output_filename' can provide directory components if
        needed).

        'libraries' is a list of libraries to link against.  These are
        library names, not filenames, since they're translated into
        filenames in a platform-specific way (eg. "foo" becomes "libfoo.a"
        on Unix and "foo.lib" on DOS/Windows).  However, they can include a
        directory component, which means the linker will look in that
        specific directory rather than searching all the normal locations.

        'library_dirs', if supplied, should be a list of directories to
        search for libraries that were specified as bare library names
        (ie. no directory component).  These are on top of the system
        default and those supplied to 'add_library_dir()' and/or
        'set_library_dirs()'.  'runtime_library_dirs' is a list of
        directories that will be embedded into the shared library and used
        to search for other shared libraries that *it* depends on at
        run-time.  (This may only be relevant on Unix.)

        'export_symbols' is a list of symbols that the shared library will
        export.  (This appears to be relevant only on Windows.)

        'debug' is as for 'compile()' and 'create_static_lib()', with the
        slight distinction that it actually matters on most platforms (as
        opposed to 'create_static_lib()', which includes a 'debug' flag
        mostly for form's sake).

        'extra_preargs' and 'extra_postargs' are as for 'compile()' (except
        of course that they supply command-line arguments for the
        particular linker being used).

        'target_lang' is the target language for which the given objects
        are being compiled. This allows specific linkage time treatment of
        certain languages.

        Raises LinkError on failure.
        """
        # Abstract: every platform drives its linker differently, so the
        # base class provides no default implementation.
        raise NotImplementedError
+
+
+    # Old 'link_*()' methods, rewritten to use the new 'link()' method.
+
+    def link_shared_lib(self, objects, output_libname, output_dir=None,
+                        libraries=None, library_dirs=None,
+                        runtime_library_dirs=None, export_symbols=None,
+                        debug=0, extra_preargs=None, extra_postargs=None,
+                        build_temp=None, target_lang=None):
+        self.link(CCompiler.SHARED_LIBRARY, objects,
+                  self.library_filename(output_libname, lib_type='shared'),
+                  output_dir,
+                  libraries, library_dirs, runtime_library_dirs,
+                  export_symbols, debug,
+                  extra_preargs, extra_postargs, build_temp, target_lang)
+
+
+    def link_shared_object(self, objects, output_filename, output_dir=None,
+                           libraries=None, library_dirs=None,
+                           runtime_library_dirs=None, export_symbols=None,
+                           debug=0, extra_preargs=None, extra_postargs=None,
+                           build_temp=None, target_lang=None):
+        self.link(CCompiler.SHARED_OBJECT, objects,
+                  output_filename, output_dir,
+                  libraries, library_dirs, runtime_library_dirs,
+                  export_symbols, debug,
+                  extra_preargs, extra_postargs, build_temp, target_lang)
+
+    def link_executable(self, objects, output_progname, output_dir=None,
+                        libraries=None, library_dirs=None,
+                        runtime_library_dirs=None, debug=0, extra_preargs=None,
+                        extra_postargs=None, target_lang=None):
+        self.link(CCompiler.EXECUTABLE, objects,
+                  self.executable_filename(output_progname), output_dir,
+                  libraries, library_dirs, runtime_library_dirs, None,
+                  debug, extra_preargs, extra_postargs, None, target_lang)
+
+
+    # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options() function; there is
+    # no appropriate default implementation so subclasses should
+    # implement all of these.
+
    def library_dir_option(self, dir):
        """Return the compiler option to add 'dir' to the list of
        directories searched for libraries.
        """
        # Abstract: the flag spelling is platform-specific, so each
        # compiler class supplies its own.
        raise NotImplementedError
+
    def runtime_library_dir_option(self, dir):
        """Return the compiler option to add 'dir' to the list of
        directories searched for runtime libraries.
        """
        # Abstract: the flag spelling is platform-specific, so each
        # compiler class supplies its own.
        raise NotImplementedError
+
    def library_option(self, lib):
        """Return the compiler option to add 'lib' to the list of libraries
        linked into the shared library or executable.
        """
        # Abstract: the flag spelling is platform-specific, so each
        # compiler class supplies its own.
        raise NotImplementedError
+
+    def has_function(self, funcname, includes=None, include_dirs=None,
+                     libraries=None, library_dirs=None):
+        """Return a boolean indicating whether funcname is supported on
+        the current platform.  The optional arguments can be used to
+        augment the compilation environment.
+        """
+
+        # this can't be included at module scope because it tries to
+        # import math which might not be available at that point - maybe
+        # the necessary logic should just be inlined?
+        import tempfile
+        if includes is None:
+            includes = []
+        if include_dirs is None:
+            include_dirs = []
+        if libraries is None:
+            libraries = []
+        if library_dirs is None:
+            library_dirs = []
+        fd, fname = tempfile.mkstemp(".c", funcname, text=True)
+        f = os.fdopen(fd, "w")
+        try:
+            for incl in includes:
+                f.write("""#include "%s"\n""" % incl)
+            f.write("""\
+main (int argc, char **argv) {
+    %s();
+}
+""" % funcname)
+        finally:
+            f.close()
+        try:
+            objects = self.compile([fname], include_dirs=include_dirs)
+        except CompileError:
+            return False
+
+        try:
+            self.link_executable(objects, "a.out",
+                                 libraries=libraries,
+                                 library_dirs=library_dirs)
+        except (LinkError, TypeError):
+            return False
+        return True
+
    def find_library_file (self, dirs, lib, debug=0):
        """Search the specified list of directories for a static or shared
        library file 'lib' and return the full path to that file.  If
        'debug' true, look for a debugging version (if that makes sense on
        the current platform).  Return None if 'lib' wasn't found in any of
        the specified directories.
        """
        # Abstract: library filename conventions are platform-specific, so
        # each compiler class supplies its own search.
        raise NotImplementedError
+
+    # -- Filename generation methods -----------------------------------
+
+    # The default implementation of the filename generating methods are
+    # prejudiced towards the Unix/DOS/Windows view of the world:
+    #   * object files are named by replacing the source file extension
+    #     (eg. .c/.cpp -> .o/.obj)
+    #   * library files (shared or static) are named by plugging the
+    #     library name and extension into a format string, eg.
+    #     "lib%s.%s" % (lib_name, ".a") for Unix static libraries
+    #   * executables are named by appending an extension (possibly
+    #     empty) to the program name: eg. progname + ".exe" for
+    #     Windows
+    #
+    # To reduce redundant code, these methods expect to find
+    # several attributes in the current object (presumably defined
+    # as class attributes):
+    #   * src_extensions -
+    #     list of C/C++ source file extensions, eg. ['.c', '.cpp']
+    #   * obj_extension -
+    #     object file extension, eg. '.o' or '.obj'
+    #   * static_lib_extension -
+    #     extension for static library files, eg. '.a' or '.lib'
+    #   * shared_lib_extension -
+    #     extension for shared library/object files, eg. '.so', '.dll'
+    #   * static_lib_format -
+    #     format string for generating static library filenames,
+    #     eg. 'lib%s.%s' or '%s.%s'
+    #   * shared_lib_format
+    #     format string for generating shared library filenames
+    #     (probably same as static_lib_format, since the extension
+    #     is one of the intended parameters to the format string)
+    #   * exe_extension -
+    #     extension for executable files, eg. '' or '.exe'
+
+    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+        if output_dir is None:
+            output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            base, ext = os.path.splitext(src_name)
+            base = os.path.splitdrive(base)[1] # Chop off the drive
+            base = base[os.path.isabs(base):]  # If abs, chop off leading /
+            if ext not in self.src_extensions:
+                raise UnknownFileError, \
+                      "unknown file type '%s' (from '%s')" % (ext, src_name)
+            if strip_dir:
+                base = os.path.basename(base)
+            obj_names.append(os.path.join(output_dir,
+                                          base + self.obj_extension))
+        return obj_names
+
+    def shared_object_filename(self, basename, strip_dir=0, output_dir=''):
+        assert output_dir is not None
+        if strip_dir:
+            basename = os.path.basename (basename)
+        return os.path.join(output_dir, basename + self.shared_lib_extension)
+
+    def executable_filename(self, basename, strip_dir=0, output_dir=''):
+        assert output_dir is not None
+        if strip_dir:
+            basename = os.path.basename (basename)
+        return os.path.join(output_dir, basename + (self.exe_extension or ''))
+
+    def library_filename(self, libname, lib_type='static',     # or 'shared'
+                         strip_dir=0, output_dir=''):
+        """Return the library filename for 'libname', built from the
+        class's '<lib_type>_lib_format' pattern and
+        '<lib_type>_lib_extension'.  'lib_type' must be "static",
+        "shared" or "dylib" (ValueError otherwise).  The directory part
+        of 'libname' is kept unless 'strip_dir' is true; the result is
+        joined onto 'output_dir'.
+        """
+        assert output_dir is not None
+        if lib_type not in ("static", "shared", "dylib"):
+            raise ValueError, "'lib_type' must be \"static\", \"shared\" or \"dylib\""
+        fmt = getattr(self, lib_type + "_lib_format")
+        ext = getattr(self, lib_type + "_lib_extension")
+
+        dir, base = os.path.split (libname)
+        filename = fmt % (base, ext)
+        if strip_dir:
+            dir = ''
+
+        return os.path.join(output_dir, dir, filename)
+
+
+    # -- Utility methods -----------------------------------------------
+
+    def announce(self, msg, level=1):
+        # 'level' is accepted for interface compatibility but ignored;
+        # the message always goes to the log at debug priority.
+        log.debug(msg)
+
+    def debug_print(self, msg):
+        """Print 'msg' only if the DISTUTILS_DEBUG flag is set."""
+        from distutils.debug import DEBUG
+        if DEBUG:
+            print msg
+
+    def warn(self, msg):
+        # Warnings go straight to stderr, bypassing the distutils log.
+        sys.stderr.write("warning: %s\n" % msg)
+
+    def execute(self, func, args, msg=None, level=1):
+        # Delegate to the module-level execute() helper, passing along
+        # this compiler's dry-run flag.
+        execute(func, args, msg, self.dry_run)
+
+    def spawn(self, cmd):
+        """Run an external command, respecting the dry-run flag."""
+        spawn(cmd, dry_run=self.dry_run)
+
+    def move_file(self, src, dst):
+        """Move 'src' to 'dst', respecting the dry-run flag."""
+        return move_file(src, dst, dry_run=self.dry_run)
+
+    def mkpath(self, name, mode=0777):
+        # mode 0777 (octal) is the conventional pre-umask default.
+        mkpath(name, mode, dry_run=self.dry_run)
+
+
+# class CCompiler
+
+
+# Map a sys.platform/os.name ('posix', 'nt') to the default compiler
+# type for that platform. Keys are interpreted as re match
+# patterns. Order is important; platform mappings are preferred over
+# OS names.
+# (Consumed by get_default_compiler(), which tries the entries in order,
+# matching each pattern against both the platform string and the OS name.)
+_default_compilers = (
+
+    # Platform string mappings
+
+    # on a cygwin built python we can use gcc like an ordinary UNIXish
+    # compiler
+    ('cygwin.*', 'unix'),
+    ('os2emx', 'emx'),
+
+    # OS name mappings
+    ('posix', 'unix'),
+    ('nt', 'msvc'),
+
+    )
+
+def get_default_compiler(osname=None, platform=None):
+    """ Determine the default compiler to use for the given platform.
+
+        osname should be one of the standard Python OS names (i.e. the
+        ones returned by os.name) and platform the common value
+        returned by sys.platform for the platform in question.
+
+        The default values are os.name and sys.platform in case the
+        parameters are not given.
+
+        Falls back to 'unix' when no entry in _default_compilers matches.
+
+    """
+    if osname is None:
+        osname = os.name
+    if platform is None:
+        platform = sys.platform
+    for pattern, compiler in _default_compilers:
+        # Each pattern is tried against the platform string and the OS name.
+        if re.match(pattern, platform) is not None or \
+           re.match(pattern, osname) is not None:
+            return compiler
+    # Default to Unix compiler
+    return 'unix'
+
+# Map compiler types to (module_name, class_name) pairs -- ie. where to
+# find the code that implements an interface to this compiler.  (The module
+# is assumed to be in the 'distutils' package.)
+# The third element of each value is the human-readable description
+# printed by show_compilers().
+compiler_class = { 'unix':    ('unixccompiler', 'UnixCCompiler',
+                               "standard UNIX-style compiler"),
+                   'msvc':    ('msvccompiler', 'MSVCCompiler',
+                               "Microsoft Visual C++"),
+                   'cygwin':  ('cygwinccompiler', 'CygwinCCompiler',
+                               "Cygwin port of GNU C Compiler for Win32"),
+                   'mingw32': ('cygwinccompiler', 'Mingw32CCompiler',
+                               "Mingw32 port of GNU C Compiler for Win32"),
+                   'bcpp':    ('bcppcompiler', 'BCPPCompiler',
+                               "Borland C++ Compiler"),
+                   'emx':     ('emxccompiler', 'EMXCCompiler',
+                               "EMX port of GNU C Compiler for OS/2"),
+                 }
+
+def show_compilers():
+    """Print list of available compilers (used by the "--help-compiler"
+    options to "build", "build_ext", "build_clib").
+    """
+    # XXX this "knows" that the compiler option it's describing is
+    # "--compiler", which just happens to be the case for the three
+    # commands that use it.
+    from distutils.fancy_getopt import FancyGetopt
+    compilers = []
+    for compiler in compiler_class.keys():
+        # ('compiler=<name>', no short option, description) rows for
+        # FancyGetopt's help formatter.
+        compilers.append(("compiler="+compiler, None,
+                          compiler_class[compiler][2]))
+    compilers.sort()
+    pretty_printer = FancyGetopt(compilers)
+    pretty_printer.print_help("List of available compilers:")
+
+
+def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
+    """Generate an instance of some CCompiler subclass for the supplied
+    platform/compiler combination.  'plat' defaults to 'os.name'
+    (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
+    for that platform.  Currently only 'posix' and 'nt' are supported, and
+    the default compilers are "traditional Unix interface" (UnixCCompiler
+    class) and Visual C++ (MSVCCompiler class).  Note that it's perfectly
+    possible to ask for a Unix compiler object under Windows, and a
+    Microsoft compiler object under Unix -- if you supply a value for
+    'compiler', 'plat' is ignored.
+
+    Raises DistutilsPlatformError for an unknown compiler type, and
+    DistutilsModuleError when the implementing module or class cannot
+    be loaded.
+    """
+    if plat is None:
+        plat = os.name
+
+    try:
+        if compiler is None:
+            compiler = get_default_compiler(plat)
+
+        (module_name, class_name, long_description) = compiler_class[compiler]
+    except KeyError:
+        # Unknown compiler type -- either supplied by the caller or
+        # derived from 'plat'.
+        msg = "don't know how to compile C/C++ code on platform '%s'" % plat
+        if compiler is not None:
+            msg = msg + " with '%s' compiler" % compiler
+        raise DistutilsPlatformError, msg
+
+    try:
+        module_name = "distutils." + module_name
+        __import__ (module_name)
+        module = sys.modules[module_name]
+        # KeyError here means the expected class is missing from the module.
+        klass = vars(module)[class_name]
+    except ImportError:
+        raise DistutilsModuleError, \
+              "can't compile C/C++ code: unable to load module '%s'" % \
+              module_name
+    except KeyError:
+        raise DistutilsModuleError, \
+              ("can't compile C/C++ code: unable to find class '%s' " +
+               "in module '%s'") % (class_name, module_name)
+
+    # XXX The None is necessary to preserve backwards compatibility
+    # with classes that expect verbose to be the first positional
+    # argument.
+    return klass(None, dry_run, force)
+
+
+def gen_preprocess_options(macros, include_dirs):
+    """Generate C pre-processor options (-D, -U, -I) as used by at least
+    two types of compilers: the typical Unix compiler and Visual C++.
+    'macros' is the usual thing, a list of 1- or 2-tuples, where (name,)
+    means undefine (-U) macro 'name', and (name,value) means define (-D)
+    macro 'name' to 'value'.  'include_dirs' is just a list of directory
+    names to be added to the header file search path (-I).  Returns a list
+    of command-line options suitable for either Unix compilers or Visual
+    C++.  Raises TypeError for a macro entry that is not a 1- or 2-tuple.
+    """
+    # XXX it would be nice (mainly aesthetic, and so we don't generate
+    # stupid-looking command lines) to go over 'macros' and eliminate
+    # redundant definitions/undefinitions (ie. ensure that only the
+    # latest mention of a particular macro winds up on the command
+    # line).  I don't think it's essential, though, since most (all?)
+    # Unix C compilers only pay attention to the latest -D or -U
+    # mention of a macro on their command line.  Similar situation for
+    # 'include_dirs'.  I'm punting on both for now.  Anyways, weeding out
+    # redundancies like this should probably be the province of
+    # CCompiler, since the data structures used are inherited from it
+    # and therefore common to all CCompiler classes.
+
+    pp_opts = []
+    for macro in macros:
+
+        # Each macro must literally be a tuple -- a 2-element list is
+        # rejected here.
+        if not (isinstance(macro, tuple) and
+                1 <= len (macro) <= 2):
+            raise TypeError, \
+                  ("bad macro definition '%s': " +
+                   "each element of 'macros' list must be a 1- or 2-tuple") % \
+                  macro
+
+        if len (macro) == 1:        # undefine this macro
+            pp_opts.append ("-U%s" % macro[0])
+        elif len (macro) == 2:
+            if macro[1] is None:    # define with no explicit value
+                pp_opts.append ("-D%s" % macro[0])
+            else:
+                # XXX *don't* need to be clever about quoting the
+                # macro value here, because we're going to avoid the
+                # shell at all costs when we spawn the command!
+                pp_opts.append ("-D%s=%s" % macro)
+
+    # -I options preserve the caller's ordering.
+    for dir in include_dirs:
+        pp_opts.append ("-I%s" % dir)
+
+    return pp_opts
+
+
+def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
+    """Generate linker options for searching library directories and
+    linking with specific libraries.
+
+    'libraries' and 'library_dirs' are, respectively, lists of library names
+    (not filenames!) and search directories.  Returns a list of command-line
+    options suitable for use with some compiler (depending on the two format
+    strings passed in).
+    """
+    lib_opts = []
+
+    for dir in library_dirs:
+        lib_opts.append(compiler.library_dir_option(dir))
+
+    for dir in runtime_library_dirs:
+        # runtime_library_dir_option() may return a single option string
+        # or a list of them.
+        opt = compiler.runtime_library_dir_option(dir)
+        if isinstance(opt, list):
+            lib_opts.extend(opt)
+        else:
+            lib_opts.append(opt)
+
+    # XXX it's important that we *not* remove redundant library mentions!
+    # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to
+    # resolve all symbols.  I just hope we never have to say "-lfoo obj.o
+    # -lbar" to get things to work -- that's certainly a possibility, but a
+    # pretty nasty way to arrange your C code.
+
+    for lib in libraries:
+        lib_dir, lib_name = os.path.split(lib)
+        if lib_dir != '':
+            # A directory-qualified name means "this specific library file":
+            # search only that directory and link the file directly (warn
+            # and skip if it isn't found).
+            lib_file = compiler.find_library_file([lib_dir], lib_name)
+            if lib_file is not None:
+                lib_opts.append(lib_file)
+            else:
+                compiler.warn("no library file corresponding to "
+                              "'%s' found (skipping)" % lib)
+        else:
+            lib_opts.append(compiler.library_option(lib))
+
+    return lib_opts
diff --git a/lib/distutils/distutils/cmd.py b/lib/distutils/distutils/cmd.py
new file mode 100644
index 0000000..9ad5657
--- /dev/null
+++ b/lib/distutils/distutils/cmd.py
@@ -0,0 +1,457 @@
+"""distutils.cmd
+
+Provides the Command class, the base class for the command classes
+in the distutils.command package.
+"""
+
+__revision__ = "$Id$"
+
+import sys, os, re
+from distutils.errors import DistutilsOptionError
+from distutils import util, dir_util, file_util, archive_util, dep_util
+from distutils import log
+
+class Command:
+    """Abstract base class for defining command classes, the "worker bees"
+    of the Distutils.  A useful analogy for command classes is to think of
+    them as subroutines with local variables called "options".  The options
+    are "declared" in 'initialize_options()' and "defined" (given their
+    final values, aka "finalized") in 'finalize_options()', both of which
+    must be defined by every command class.  The distinction between the
+    two is necessary because option values might come from the outside
+    world (command line, config file, ...), and any options dependent on
+    other options must be computed *after* these outside influences have
+    been processed -- hence 'finalize_options()'.  The "body" of the
+    subroutine, where it does all its work based on the values of its
+    options, is the 'run()' method, which must also be implemented by every
+    command class.
+    """
+
+    # 'sub_commands' formalizes the notion of a "family" of commands,
+    # eg. "install" as the parent with sub-commands "install_lib",
+    # "install_headers", etc.  The parent of a family of commands
+    # defines 'sub_commands' as a class attribute; it's a list of
+    #    (command_name : string, predicate : unbound_method | string | None)
+    # tuples, where 'predicate' is a method of the parent command that
+    # determines whether the corresponding command is applicable in the
+    # current situation.  (Eg. we "install_headers" is only applicable if
+    # we have any C header files to install.)  If 'predicate' is None,
+    # that command is always applicable.
+    #
+    # 'sub_commands' is usually defined at the *end* of a class, because
+    # predicates can be unbound methods, so they must already have been
+    # defined.  The canonical example is the "install" command.
+    sub_commands = []
+
+
+    # -- Creation/initialization methods -------------------------------
+
+    def __init__(self, dist):
+        """Create and initialize a new Command object.  Most importantly,
+        invokes the 'initialize_options()' method, which is the real
+        initializer and depends on the actual command being
+        instantiated.
+        """
+        # late import because of mutual dependence between these classes
+        from distutils.dist import Distribution
+
+        if not isinstance(dist, Distribution):
+            raise TypeError, "dist must be a Distribution instance"
+        if self.__class__ is Command:
+            raise RuntimeError, "Command is an abstract class"
+
+        self.distribution = dist
+        self.initialize_options()
+
+        # Per-command versions of the global flags, so that the user can
+        # customize Distutils' behaviour command-by-command and let some
+        # commands fall back on the Distribution's behaviour.  None means
+        # "not defined, check self.distribution's copy", while 0 or 1 mean
+        # false and true (duh).  Note that this means figuring out the real
+        # value of each flag is a touch complicated -- hence "self._dry_run"
+        # will be handled by __getattr__, below.
+        # XXX This needs to be fixed.
+        self._dry_run = None
+
+        # verbose is largely ignored, but needs to be set for
+        # backwards compatibility (I think)?
+        self.verbose = dist.verbose
+
+        # Some commands define a 'self.force' option to ignore file
+        # timestamps, but methods defined *here* assume that
+        # 'self.force' exists for all commands.  So define it here
+        # just to be safe.
+        self.force = None
+
+        # The 'help' flag is just used for command-line parsing, so
+        # none of that complicated bureaucracy is needed.
+        self.help = 0
+
+        # 'finalized' records whether or not 'finalize_options()' has been
+        # called.  'finalize_options()' itself should not pay attention to
+        # this flag: it is the business of 'ensure_finalized()', which
+        # always calls 'finalize_options()', to respect/update it.
+        self.finalized = 0
+
+    # XXX A more explicit way to customize dry_run would be better.
+    def __getattr__(self, attr):
+        # Only 'dry_run' is synthesized here: fall back to the
+        # Distribution's flag when the per-command '_dry_run' is None
+        # (i.e. not set for this command).
+        if attr == 'dry_run':
+            myval = getattr(self, "_" + attr)
+            if myval is None:
+                return getattr(self.distribution, attr)
+            else:
+                return myval
+        else:
+            raise AttributeError, attr
+
+    def ensure_finalized(self):
+        """Call 'finalize_options()' if it has not been called yet, and
+        record that it now has been."""
+        if not self.finalized:
+            self.finalize_options()
+        self.finalized = 1
+
+    # Subclasses must define:
+    #   initialize_options()
+    #     provide default values for all options; may be customized by
+    #     setup script, by options from config file(s), or by command-line
+    #     options
+    #   finalize_options()
+    #     decide on the final values for all options; this is called
+    #     after all possible intervention from the outside world
+    #     (command-line, option file, etc.) has been processed
+    #   run()
+    #     run the command: do whatever it is we're here to do,
+    #     controlled by the command's various option values
+
+    def initialize_options(self):
+        """Set default values for all the options that this command
+        supports.  Note that these defaults may be overridden by other
+        commands, by the setup script, by config files, or by the
+        command-line.  Thus, this is not the place to code dependencies
+        between options; generally, 'initialize_options()' implementations
+        are just a bunch of "self.foo = None" assignments.
+
+        This method must be implemented by all command classes.
+        """
+        raise RuntimeError, \
+              "abstract method -- subclass %s must override" % self.__class__
+
+    def finalize_options(self):
+        """Set final values for all the options that this command supports.
+        This is always called as late as possible, ie.  after any option
+        assignments from the command-line or from other commands have been
+        done.  Thus, this is the place to code option dependencies: if
+        'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
+        long as 'foo' still has the same value it was assigned in
+        'initialize_options()'.
+
+        This method must be implemented by all command classes.
+        """
+        raise RuntimeError, \
+              "abstract method -- subclass %s must override" % self.__class__
+
+
+    def dump_options(self, header=None, indent=""):
+        """Announce (at INFO level) every option in 'self.user_options'
+        and its current value, one per line under 'header'."""
+        from distutils.fancy_getopt import longopt_xlate
+        if header is None:
+            header = "command options for '%s':" % self.get_command_name()
+        self.announce(indent + header, level=log.INFO)
+        indent = indent + "  "
+        for (option, _, _) in self.user_options:
+            # Translate the long-option spelling to the attribute name
+            # and drop a trailing '=' (takes-a-value marker).
+            option = option.translate(longopt_xlate)
+            if option[-1] == "=":
+                option = option[:-1]
+            value = getattr(self, option)
+            self.announce(indent + "%s = %s" % (option, value),
+                          level=log.INFO)
+
+    def run(self):
+        """A command's raison d'etre: carry out the action it exists to
+        perform, controlled by the options initialized in
+        'initialize_options()', customized by other commands, the setup
+        script, the command-line, and config files, and finalized in
+        'finalize_options()'.  All terminal output and filesystem
+        interaction should be done by 'run()'.
+
+        This method must be implemented by all command classes.
+        """
+        raise RuntimeError, \
+              "abstract method -- subclass %s must override" % self.__class__
+
+    def announce(self, msg, level=1):
+        """If the current verbosity level is of greater than or equal to
+        'level' print 'msg' to stdout.
+        """
+        log.log(level, msg)
+
+    def debug_print(self, msg):
+        """Print 'msg' to stdout if the global DEBUG (taken from the
+        DISTUTILS_DEBUG environment variable) flag is true.
+        """
+        from distutils.debug import DEBUG
+        if DEBUG:
+            print msg
+            sys.stdout.flush()
+
+
+    # -- Option validation methods -------------------------------------
+    # (these are very handy in writing the 'finalize_options()' method)
+    #
+    # NB. the general philosophy here is to ensure that a particular option
+    # value meets certain type and value constraints.  If not, we try to
+    # force it into conformance (eg. if we expect a list but have a string,
+    # split the string on comma and/or whitespace).  If we can't force the
+    # option into conformance, raise DistutilsOptionError.  Thus, command
+    # classes need do nothing more than (eg.)
+    #   self.ensure_string_list('foo')
+    # and they can be guaranteed that thereafter, self.foo will be
+    # a list of strings.
+
+    def _ensure_stringlike(self, option, what, default=None):
+        # Common machinery for ensure_string()/_ensure_tested_string():
+        # substitute 'default' for None, reject non-strings, and return
+        # the (possibly defaulted) value.
+        val = getattr(self, option)
+        if val is None:
+            setattr(self, option, default)
+            return default
+        elif not isinstance(val, str):
+            raise DistutilsOptionError, \
+                  "'%s' must be a %s (got `%s`)" % (option, what, val)
+        return val
+
+    def ensure_string(self, option, default=None):
+        """Ensure that 'option' is a string; if not defined, set it to
+        'default'.
+        """
+        self._ensure_stringlike(option, "string", default)
+
+    def ensure_string_list(self, option):
+        """Ensure that 'option' is a list of strings.  If 'option' is
+        currently a string, we split it either on /,\s*/ or /\s+/, so
+        "foo bar baz", "foo,bar,baz", and "foo,   bar baz" all become
+        ["foo", "bar", "baz"].
+        """
+        val = getattr(self, option)
+        if val is None:
+            return
+        elif isinstance(val, str):
+            setattr(self, option, re.split(r',\s*|\s+', val))
+        else:
+            if isinstance(val, list):
+                # checks if all elements are str
+                ok = 1
+                for element in val:
+                    if not isinstance(element, str):
+                        ok = 0
+                        break
+            else:
+                ok = 0
+
+            if not ok:
+                raise DistutilsOptionError, \
+                    "'%s' must be a list of strings (got %r)" % \
+                        (option, val)
+
+
+    def _ensure_tested_string(self, option, tester,
+                              what, error_fmt, default=None):
+        # Like _ensure_stringlike(), but additionally require that
+        # tester(val) is true for a non-None value.
+        val = self._ensure_stringlike(option, what, default)
+        if val is not None and not tester(val):
+            raise DistutilsOptionError, \
+                  ("error in '%s' option: " + error_fmt) % (option, val)
+
+    def ensure_filename(self, option):
+        """Ensure that 'option' is the name of an existing file."""
+        self._ensure_tested_string(option, os.path.isfile,
+                                   "filename",
+                                   "'%s' does not exist or is not a file")
+
+    def ensure_dirname(self, option):
+        """Ensure that 'option' is the name of an existing directory."""
+        self._ensure_tested_string(option, os.path.isdir,
+                                   "directory name",
+                                   "'%s' does not exist or is not a directory")
+
+
+    # -- Convenience methods for commands ------------------------------
+
+    def get_command_name(self):
+        """Return 'self.command_name' if set, else the class name."""
+        if hasattr(self, 'command_name'):
+            return self.command_name
+        else:
+            return self.__class__.__name__
+
+    def set_undefined_options(self, src_cmd, *option_pairs):
+        """Set the values of any "undefined" options from corresponding
+        option values in some other command object.  "Undefined" here means
+        "is None", which is the convention used to indicate that an option
+        has not been changed between 'initialize_options()' and
+        'finalize_options()'.  Usually called from 'finalize_options()' for
+        options that depend on some other command rather than another
+        option of the same command.  'src_cmd' is the other command from
+        which option values will be taken (a command object will be created
+        for it if necessary); the remaining arguments are
+        '(src_option,dst_option)' tuples which mean "take the value of
+        'src_option' in the 'src_cmd' command object, and copy it to
+        'dst_option' in the current command object".
+        """
+
+        # Option_pairs: list of (src_option, dst_option) tuples
+
+        src_cmd_obj = self.distribution.get_command_obj(src_cmd)
+        src_cmd_obj.ensure_finalized()
+        for (src_option, dst_option) in option_pairs:
+            if getattr(self, dst_option) is None:
+                setattr(self, dst_option,
+                        getattr(src_cmd_obj, src_option))
+
+
+    def get_finalized_command(self, command, create=1):
+        """Wrapper around Distribution's 'get_command_obj()' method: find
+        (create if necessary and 'create' is true) the command object for
+        'command', call its 'ensure_finalized()' method, and return the
+        finalized command object.
+        """
+        cmd_obj = self.distribution.get_command_obj(command, create)
+        cmd_obj.ensure_finalized()
+        return cmd_obj
+
+    # XXX rename to 'get_reinitialized_command()'? (should do the
+    # same in dist.py, if so)
+    def reinitialize_command(self, command, reinit_subcommands=0):
+        """Delegate to Distribution's 'reinitialize_command()'."""
+        return self.distribution.reinitialize_command(
+            command, reinit_subcommands)
+
+    def run_command(self, command):
+        """Run some other command: uses the 'run_command()' method of
+        Distribution, which creates and finalizes the command object if
+        necessary and then invokes its 'run()' method.
+        """
+        self.distribution.run_command(command)
+
+    def get_sub_commands(self):
+        """Determine the sub-commands that are relevant in the current
+        distribution (ie., that need to be run).  This is based on the
+        'sub_commands' class attribute: each tuple in that list may include
+        a method that we call to determine if the subcommand needs to be
+        run for the current distribution.  Return a list of command names.
+        """
+        commands = []
+        for (cmd_name, method) in self.sub_commands:
+            if method is None or method(self):
+                commands.append(cmd_name)
+        return commands
+
+
+    # -- External world manipulation -----------------------------------
+
+    def warn(self, msg):
+        # Warnings go through the distutils log, tagged with this
+        # command's name.
+        log.warn("warning: %s: %s\n" %
+                (self.get_command_name(), msg))
+
+    def execute(self, func, args, msg=None, level=1):
+        """Perform 'func(*args)', announcing 'msg', respecting dry-run."""
+        util.execute(func, args, msg, dry_run=self.dry_run)
+
+    def mkpath(self, name, mode=0777):
+        # mode 0777 (octal) is the conventional pre-umask default.
+        dir_util.mkpath(name, mode, dry_run=self.dry_run)
+
+    def copy_file(self, infile, outfile,
+                   preserve_mode=1, preserve_times=1, link=None, level=1):
+        """Copy a file respecting verbose, dry-run and force flags.  (The
+        former two default to whatever is in the Distribution object, and
+        the latter defaults to false for commands that don't define it.)"""
+
+        return file_util.copy_file(
+            infile, outfile,
+            preserve_mode, preserve_times,
+            not self.force,
+            link,
+            dry_run=self.dry_run)
+
+    def copy_tree(self, infile, outfile,
+                   preserve_mode=1, preserve_times=1, preserve_symlinks=0,
+                   level=1):
+        """Copy an entire directory tree respecting verbose, dry-run,
+        and force flags.
+        """
+        return dir_util.copy_tree(
+            infile, outfile,
+            preserve_mode,preserve_times,preserve_symlinks,
+            not self.force,
+            dry_run=self.dry_run)
+
+    def move_file (self, src, dst, level=1):
+        """Move a file respecting dry-run flag."""
+        return file_util.move_file(src, dst, dry_run = self.dry_run)
+
+    def spawn (self, cmd, search_path=1, level=1):
+        """Spawn an external command respecting dry-run flag."""
+        from distutils.spawn import spawn
+        spawn(cmd, search_path, dry_run= self.dry_run)
+
+    def make_archive(self, base_name, format, root_dir=None, base_dir=None,
+                     owner=None, group=None):
+        """Create an archive file in the given 'format', respecting the
+        dry-run flag."""
+        return archive_util.make_archive(base_name, format, root_dir,
+                                         base_dir, dry_run=self.dry_run,
+                                         owner=owner, group=group)
+
+    def make_file(self, infiles, outfile, func, args,
+                  exec_msg=None, skip_msg=None, level=1):
+        """Special case of 'execute()' for operations that process one or
+        more input files and generate one output file.  Works just like
+        'execute()', except the operation is skipped and a different
+        message printed if 'outfile' already exists and is newer than all
+        files listed in 'infiles'.  If the command defined 'self.force',
+        and it is true, then the command is unconditionally run -- does no
+        timestamp checks.
+        """
+        if skip_msg is None:
+            skip_msg = "skipping %s (inputs unchanged)" % outfile
+
+        # Allow 'infiles' to be a single string
+        if isinstance(infiles, str):
+            infiles = (infiles,)
+        elif not isinstance(infiles, (list, tuple)):
+            raise TypeError, \
+                  "'infiles' must be a string, or a list or tuple of strings"
+
+        if exec_msg is None:
+            exec_msg = "generating %s from %s" % \
+                       (outfile, ', '.join(infiles))
+
+        # If 'outfile' must be regenerated (either because it doesn't
+        # exist, is out-of-date, or the 'force' flag is true) then
+        # perform the action that presumably regenerates it
+        if self.force or dep_util.newer_group(infiles, outfile):
+            self.execute(func, args, exec_msg, level)
+
+        # Otherwise, print the "skip" message
+        else:
+            log.debug(skip_msg)
+
+# XXX 'install_misc' class not currently used -- it was the base class for
+# both 'install_scripts' and 'install_data', but they outgrew it.  It might
+# still be useful for 'install_headers', though, so I'm keeping it around
+# for the time being.
+
+class install_misc(Command):
+    """Common base class for installing some files in a subdirectory.
+    Currently used by install_data and install_scripts.
+    """
+
+    user_options = [('install-dir=', 'd', "directory to install the files to")]
+
+    def initialize_options (self):
+        self.install_dir = None
+        self.outfiles = []
+
+    def _install_dir_from(self, dirname):
+        # Inherit 'install_dir' from the named option of the 'install'
+        # command when it was not set explicitly.
+        self.set_undefined_options('install', (dirname, 'install_dir'))
+
+    def _copy_files(self, filelist):
+        # Copy each file in 'filelist' into self.install_dir, recording
+        # the destination paths in self.outfiles.  No-op for an empty list.
+        self.outfiles = []
+        if not filelist:
+            return
+        self.mkpath(self.install_dir)
+        for f in filelist:
+            self.copy_file(f, self.install_dir)
+            self.outfiles.append(os.path.join(self.install_dir, f))
+
+    def get_outputs(self):
+        """Return the destination paths recorded by the last _copy_files()."""
+        return self.outfiles
diff --git a/lib/distutils/distutils/config.py b/lib/distutils/distutils/config.py
new file mode 100644
index 0000000..afa403f
--- /dev/null
+++ b/lib/distutils/distutils/config.py
@@ -0,0 +1,121 @@
+"""distutils.pypirc
+
+Provides the PyPIRCCommand class, the base class for the command classes
+that uses .pypirc in the distutils.command package.
+"""
+import os
+from ConfigParser import ConfigParser
+
+from distutils.cmd import Command
+
+# Template written by PyPIRCCommand._store_pypirc(); the two %s slots
+# take the username and the password.
+DEFAULT_PYPIRC = """\
+[distutils]
+index-servers =
+    pypi
+
+[pypi]
+username:%s
+password:%s
+"""
+
+class PyPIRCCommand(Command):
+    """Base command that knows how to handle the .pypirc file
+    """
+    DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'
+    DEFAULT_REALM = 'pypi'
+    repository = None
+    realm = None
+
+    user_options = [
+        ('repository=', 'r',
+         "url of repository [default: %s]" % \
+            DEFAULT_REPOSITORY),
+        ('show-response', None,
+         'display full response text from server')]
+
+    boolean_options = ['show-response']
+
+    def _get_rc_file(self):
+        """Returns rc file path."""
+        return os.path.join(os.path.expanduser('~'), '.pypirc')
+
+    def _store_pypirc(self, username, password):
+        """Creates a default .pypirc file."""
+        rc = self._get_rc_file()
+        f = open(rc, 'w')
+        try:
+            f.write(DEFAULT_PYPIRC % (username, password))
+        finally:
+            f.close()
+        try:
+            # Restrict the credentials file to the owner (octal 0600).
+            os.chmod(rc, 0600)
+        except OSError:
+            # should do something better here
+            pass
+
+    def _read_pypirc(self):
+        """Reads the .pypirc file.
+
+        Returns a dict describing the server matching self.repository
+        (or the default repository), with keys 'username', 'password',
+        'repository', 'server' and 'realm'; returns an empty dict when
+        the file is missing, malformed, or no server matches.
+        """
+        rc = self._get_rc_file()
+        if os.path.exists(rc):
+            self.announce('Using PyPI login from %s' % rc)
+            repository = self.repository or self.DEFAULT_REPOSITORY
+            config = ConfigParser()
+            config.read(rc)
+            sections = config.sections()
+            if 'distutils' in sections:
+                # let's get the list of servers
+                index_servers = config.get('distutils', 'index-servers')
+                _servers = [server.strip() for server in
+                            index_servers.split('\n')
+                            if server.strip() != '']
+                if _servers == []:
+                    # nothing set, let's try to get the default pypi
+                    if 'pypi' in sections:
+                        _servers = ['pypi']
+                    else:
+                        # the file is not properly defined, returning
+                        # an empty dict
+                        return {}
+                # return the first server whose name or repository URL
+                # matches the repository we are targeting
+                for server in _servers:
+                    current = {'server': server}
+                    current['username'] = config.get(server, 'username')
+
+                    # optional params
+                    for key, default in (('repository',
+                                          self.DEFAULT_REPOSITORY),
+                                         ('realm', self.DEFAULT_REALM),
+                                         ('password', None)):
+                        if config.has_option(server, key):
+                            current[key] = config.get(server, key)
+                        else:
+                            current[key] = default
+                    if (current['server'] == repository or
+                        current['repository'] == repository):
+                        return current
+            elif 'server-login' in sections:
+                # old format
+                server = 'server-login'
+                if config.has_option(server, 'repository'):
+                    repository = config.get(server, 'repository')
+                else:
+                    repository = self.DEFAULT_REPOSITORY
+                return {'username': config.get(server, 'username'),
+                        'password': config.get(server, 'password'),
+                        'repository': repository,
+                        'server': server,
+                        'realm': self.DEFAULT_REALM}
+
+        # No file, or no matching server above: fall through to empty.
+        return {}
+
+    def initialize_options(self):
+        """Initialize options."""
+        self.repository = None
+        self.realm = None
+        self.show_response = 0
+
+    def finalize_options(self):
+        """Finalizes options."""
+        if self.repository is None:
+            self.repository = self.DEFAULT_REPOSITORY
+        if self.realm is None:
+            self.realm = self.DEFAULT_REALM
diff --git a/lib/distutils/distutils/core.py b/lib/distutils/distutils/core.py
new file mode 100644
index 0000000..b89557d
--- /dev/null
+++ b/lib/distutils/distutils/core.py
@@ -0,0 +1,242 @@
+"""distutils.core
+
+The only module that needs to be imported to use the Distutils; provides
+the 'setup' function (which is to be called from the setup script).  Also
+indirectly provides the Distribution and Command classes, although they are
+really defined in distutils.dist and distutils.cmd.
+"""
+
+__revision__ = "$Id$"
+
+import sys
+import os
+
+from distutils.debug import DEBUG
+from distutils.errors import (DistutilsSetupError, DistutilsArgError,
+                              DistutilsError, CCompilerError)
+from distutils.util import grok_environment_error
+
+# Mainly import these so setup scripts can "from distutils.core import" them.
+from distutils.dist import Distribution
+from distutils.cmd import Command
+from distutils.config import PyPIRCCommand
+from distutils.extension import Extension
+
+# This is a barebones help message generated displayed when the user
+# runs the setup script with no arguments at all.  More useful help
+# is generated with various --help options: global help, list commands,
+# and per-command help.
+USAGE = """\
+usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
+   or: %(script)s --help [cmd1 cmd2 ...]
+   or: %(script)s --help-commands
+   or: %(script)s cmd --help
+"""
+
+def gen_usage(script_name):
+    script = os.path.basename(script_name)
+    return USAGE % {'script': script}
+
+
+# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'.
+_setup_stop_after = None
+_setup_distribution = None
+
+# Legal keyword arguments for the setup() function
+setup_keywords = ('distclass', 'script_name', 'script_args', 'options',
+                  'name', 'version', 'author', 'author_email',
+                  'maintainer', 'maintainer_email', 'url', 'license',
+                  'description', 'long_description', 'keywords',
+                  'platforms', 'classifiers', 'download_url',
+                  'requires', 'provides', 'obsoletes',
+                  )
+
+# Legal keyword arguments for the Extension constructor
+extension_keywords = ('name', 'sources', 'include_dirs',
+                      'define_macros', 'undef_macros',
+                      'library_dirs', 'libraries', 'runtime_library_dirs',
+                      'extra_objects', 'extra_compile_args', 'extra_link_args',
+                      'swig_opts', 'export_symbols', 'depends', 'language')
+
+def setup(**attrs):
+    """The gateway to the Distutils: do everything your setup script needs
+    to do, in a highly flexible and user-driven way.  Briefly: create a
+    Distribution instance; find and parse config files; parse the command
+    line; run each Distutils command found there, customized by the options
+    supplied to 'setup()' (as keyword arguments), in config files, and on
+    the command line.
+
+    The Distribution instance might be an instance of a class supplied via
+    the 'distclass' keyword argument to 'setup'; if no such class is
+    supplied, then the Distribution class (in dist.py) is instantiated.
+    All other arguments to 'setup' (except for 'cmdclass') are used to set
+    attributes of the Distribution instance.
+
+    The 'cmdclass' argument, if supplied, is a dictionary mapping command
+    names to command classes.  Each command encountered on the command line
+    will be turned into a command class, which is in turn instantiated; any
+    class found in 'cmdclass' is used in place of the default, which is
+    (for command 'foo_bar') class 'foo_bar' in module
+    'distutils.command.foo_bar'.  The command class must provide a
+    'user_options' attribute which is a list of option specifiers for
+    'distutils.fancy_getopt'.  Any command-line options between the current
+    and the next command are used to set attributes of the current command
+    object.
+
+    When the entire command-line has been successfully parsed, calls the
+    'run()' method on each command object in turn.  This method will be
+    driven entirely by the Distribution object (which each command object
+    has a reference to, thanks to its constructor), and the
+    command-specific options that became attributes of each command
+    object.
+    """
+
+    global _setup_stop_after, _setup_distribution
+
+    # Determine the distribution class -- either caller-supplied or
+    # our Distribution (see below).
+    klass = attrs.get('distclass')
+    if klass:
+        del attrs['distclass']
+    else:
+        klass = Distribution
+
+    if 'script_name' not in attrs:
+        attrs['script_name'] = os.path.basename(sys.argv[0])
+    if 'script_args' not in attrs:
+        attrs['script_args'] = sys.argv[1:]
+
+    # Create the Distribution instance, using the remaining arguments
+    # (ie. everything except distclass) to initialize it
+    try:
+        _setup_distribution = dist = klass(attrs)
+    except DistutilsSetupError, msg:
+        if 'name' in attrs:
+            raise SystemExit, "error in %s setup command: %s" % \
+                  (attrs['name'], msg)
+        else:
+            raise SystemExit, "error in setup command: %s" % msg
+
+    if _setup_stop_after == "init":
+        return dist
+
+    # Find and parse the config file(s): they will override options from
+    # the setup script, but be overridden by the command line.
+    dist.parse_config_files()
+
+    if DEBUG:
+        print "options (after parsing config files):"
+        dist.dump_option_dicts()
+
+    if _setup_stop_after == "config":
+        return dist
+
+    # Parse the command line and override config files; any
+    # command-line errors are the end user's fault, so turn them into
+    # SystemExit to suppress tracebacks.
+    try:
+        ok = dist.parse_command_line()
+    except DistutilsArgError, msg:
+        raise SystemExit, gen_usage(dist.script_name) + "\nerror: %s" % msg
+
+    if DEBUG:
+        print "options (after parsing command line):"
+        dist.dump_option_dicts()
+
+    if _setup_stop_after == "commandline":
+        return dist
+
+    # And finally, run all the commands found on the command line.
+    if ok:
+        try:
+            dist.run_commands()
+        except KeyboardInterrupt:
+            raise SystemExit, "interrupted"
+        except (IOError, os.error), exc:
+            error = grok_environment_error(exc)
+
+            if DEBUG:
+                sys.stderr.write(error + "\n")
+                raise
+            else:
+                raise SystemExit, error
+
+        except (DistutilsError,
+                CCompilerError), msg:
+            if DEBUG:
+                raise
+            else:
+                raise SystemExit, "error: " + str(msg)
+
+    return dist
+
+
+def run_setup(script_name, script_args=None, stop_after="run"):
+    """Run a setup script in a somewhat controlled environment, and
+    return the Distribution instance that drives things.  This is useful
+    if you need to find out the distribution meta-data (passed as
+    keyword args from 'script' to 'setup()'), or the contents of the
+    config files or command-line.
+
+    'script_name' is a file that will be run with 'execfile()';
+    'sys.argv[0]' will be replaced with 'script' for the duration of the
+    call.  'script_args' is a list of strings; if supplied,
+    'sys.argv[1:]' will be replaced by 'script_args' for the duration of
+    the call.
+
+    'stop_after' tells 'setup()' when to stop processing; possible
+    values:
+      init
+        stop after the Distribution instance has been created and
+        populated with the keyword arguments to 'setup()'
+      config
+        stop after config files have been parsed (and their data
+        stored in the Distribution instance)
+      commandline
+        stop after the command-line ('sys.argv[1:]' or 'script_args')
+        have been parsed (and the data stored in the Distribution)
+      run [default]
+        stop after all commands have been run (the same as if 'setup()'
+        had been called in the usual way)
+
+    Returns the Distribution instance, which provides all information
+    used to drive the Distutils.
+    """
+    if stop_after not in ('init', 'config', 'commandline', 'run'):
+        raise ValueError, "invalid value for 'stop_after': %r" % (stop_after,)
+
+    global _setup_stop_after, _setup_distribution
+    _setup_stop_after = stop_after
+
+    save_argv = sys.argv
+    g = {'__file__': script_name}
+    l = {}
+    try:
+        try:
+            sys.argv[0] = script_name
+            if script_args is not None:
+                sys.argv[1:] = script_args
+            f = open(script_name)
+            try:
+                exec f.read() in g, l
+            finally:
+                f.close()
+        finally:
+            sys.argv = save_argv
+            _setup_stop_after = None
+    except SystemExit:
+        # Hmm, should we do something if exiting with a non-zero code
+        # (ie. error)?
+        pass
+    except:
+        raise
+
+    if _setup_distribution is None:
+        raise RuntimeError, \
+              ("'distutils.core.setup()' was never called -- "
+               "perhaps '%s' is not a Distutils setup script?") % \
+              script_name
+
+    # I wonder if the setup script's namespace -- g and l -- would be of
+    # any interest to callers?
+    return _setup_distribution
diff --git a/lib/distutils/distutils/cygwinccompiler.py b/lib/distutils/distutils/cygwinccompiler.py
new file mode 100644
index 0000000..a1ee815
--- /dev/null
+++ b/lib/distutils/distutils/cygwinccompiler.py
@@ -0,0 +1,449 @@
+"""distutils.cygwinccompiler
+
+Provides the CygwinCCompiler class, a subclass of UnixCCompiler that
+handles the Cygwin port of the GNU C compiler to Windows.  It also contains
+the Mingw32CCompiler class which handles the mingw32 port of GCC (same as
+cygwin in no-cygwin mode).
+"""
+
+# problems:
+#
+# * if you use a msvc compiled python version (1.5.2)
+#   1. you have to insert a __GNUC__ section in its config.h
+#   2. you have to generate an import library for its dll
+#      - create a def-file for python??.dll
+#      - create an import library using
+#             dlltool --dllname python15.dll --def python15.def \
+#                       --output-lib libpython15.a
+#
+#   see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+#
+# * We put export_symbols in a def-file, and don't use
+#   --export-all-symbols because it didn't work reliably in some
+#   tested configurations. And because other Windows compilers also
+#   need their symbols specified, this is no serious problem.
+#
+# tested configurations:
+#
+# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works
+#   (after patching python's config.h and for C++ some other include files)
+#   see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works
+#   (ld doesn't support -shared, so we use dllwrap)
+# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now
+#   - its dllwrap doesn't work, there is a bug in binutils 2.10.90
+#     see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html
+#   - using gcc -mdll instead of dllwrap doesn't work without -static because
+#     it tries to link against dlls instead of their import libraries. (If
+#     it finds the dll first.)
+#     By specifying -static we force ld to link against the import libraries,
+#     this is windows standard and there are normally not the necessary symbols
+#     in the dlls.
+#   *** only the version of June 2000 shows these problems
+# * cygwin gcc 3.2/ld 2.13.90 works
+#   (ld supports -shared)
+# * mingw gcc 3.2/ld 2.13 works
+#   (ld supports -shared)
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id$"
+
+import os,sys,copy
+from distutils.ccompiler import gen_preprocess_options, gen_lib_options
+from distutils.unixccompiler import UnixCCompiler
+from distutils.file_util import write_file
+from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
+from distutils import log
+
+def get_msvcr():
+    """Include the appropriate MSVC runtime library if Python was built
+    with MSVC 7.0 or later.
+    """
+    msc_pos = sys.version.find('MSC v.')
+    if msc_pos != -1:
+        msc_ver = sys.version[msc_pos+6:msc_pos+10]
+        if msc_ver == '1300':
+            # MSVC 7.0
+            return ['msvcr70']
+        elif msc_ver == '1310':
+            # MSVC 7.1
+            return ['msvcr71']
+        elif msc_ver == '1400':
+            # VS2005 / MSVC 8.0
+            return ['msvcr80']
+        elif msc_ver == '1500':
+            # VS2008 / MSVC 9.0
+            return ['msvcr90']
+        else:
+            raise ValueError("Unknown MS Compiler version %s " % msc_ver)
+
+
+class CygwinCCompiler (UnixCCompiler):
+
+    compiler_type = 'cygwin'
+    obj_extension = ".o"
+    static_lib_extension = ".a"
+    shared_lib_extension = ".dll"
+    static_lib_format = "lib%s%s"
+    shared_lib_format = "%s%s"
+    exe_extension = ".exe"
+
+    def __init__ (self, verbose=0, dry_run=0, force=0):
+
+        UnixCCompiler.__init__ (self, verbose, dry_run, force)
+
+        (status, details) = check_config_h()
+        self.debug_print("Python's GCC status: %s (details: %s)" %
+                         (status, details))
+        if status is not CONFIG_H_OK:
+            self.warn(
+                "Python's pyconfig.h doesn't seem to support your compiler. "
+                "Reason: %s. "
+                "Compiling may fail because of undefined preprocessor macros."
+                % details)
+
+        self.gcc_version, self.ld_version, self.dllwrap_version = \
+            get_versions()
+        self.debug_print(self.compiler_type + ": gcc %s, ld %s, dllwrap %s\n" %
+                         (self.gcc_version,
+                          self.ld_version,
+                          self.dllwrap_version) )
+
+        # ld_version >= "2.10.90" and < "2.13" should also be able to use
+        # gcc -mdll instead of dllwrap
+        # Older dllwraps had own version numbers, newer ones use the
+        # same as the rest of binutils ( also ld )
+        # dllwrap 2.10.90 is buggy
+        if self.ld_version >= "2.10.90":
+            self.linker_dll = "gcc"
+        else:
+            self.linker_dll = "dllwrap"
+
+        # ld_version >= "2.13" support -shared so use it instead of
+        # -mdll -static
+        if self.ld_version >= "2.13":
+            shared_option = "-shared"
+        else:
+            shared_option = "-mdll -static"
+
+        # Hard-code GCC because that's what this is all about.
+        # XXX optimization, warnings etc. should be customizable.
+        self.set_executables(compiler='gcc -mcygwin -O -Wall',
+                             compiler_so='gcc -mcygwin -mdll -O -Wall',
+                             compiler_cxx='g++ -mcygwin -O -Wall',
+                             linker_exe='gcc -mcygwin',
+                             linker_so=('%s -mcygwin %s' %
+                                        (self.linker_dll, shared_option)))
+
+        # cygwin and mingw32 need different sets of libraries
+        if self.gcc_version == "2.91.57":
+            # cygwin shouldn't need msvcrt, but without the dlls will crash
+            # (gcc version 2.91.57) -- perhaps something about initialization
+            self.dll_libraries=["msvcrt"]
+            self.warn(
+                "Consider upgrading to a newer version of gcc")
+        else:
+            # Include the appropriate MSVC runtime library if Python was built
+            # with MSVC 7.0 or later.
+            self.dll_libraries = get_msvcr()
+
+    # __init__ ()
+
+
+    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+        if ext == '.rc' or ext == '.res':
+            # gcc needs '.res' and '.rc' compiled to object files !!!
+            try:
+                self.spawn(["windres", "-i", src, "-o", obj])
+            except DistutilsExecError, msg:
+                raise CompileError, msg
+        else: # for other files use the C-compiler
+            try:
+                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
+                           extra_postargs)
+            except DistutilsExecError, msg:
+                raise CompileError, msg
+
+    def link (self,
+              target_desc,
+              objects,
+              output_filename,
+              output_dir=None,
+              libraries=None,
+              library_dirs=None,
+              runtime_library_dirs=None,
+              export_symbols=None,
+              debug=0,
+              extra_preargs=None,
+              extra_postargs=None,
+              build_temp=None,
+              target_lang=None):
+
+        # use separate copies, so we can modify the lists
+        extra_preargs = copy.copy(extra_preargs or [])
+        libraries = copy.copy(libraries or [])
+        objects = copy.copy(objects or [])
+
+        # Additional libraries
+        libraries.extend(self.dll_libraries)
+
+        # handle export symbols by creating a def-file
+        # with executables this only works with gcc/ld as linker
+        if ((export_symbols is not None) and
+            (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
+            # (The linker doesn't do anything if output is up-to-date.
+            # So it would probably better to check if we really need this,
+            # but for this we had to insert some unchanged parts of
+            # UnixCCompiler, and this is not what we want.)
+
+            # we want to put some files in the same directory as the
+            # object files are, build_temp doesn't help much
+            # where are the object files
+            temp_dir = os.path.dirname(objects[0])
+            # name of dll to give the helper files the same base name
+            (dll_name, dll_extension) = os.path.splitext(
+                os.path.basename(output_filename))
+
+            # generate the filenames for these files
+            def_file = os.path.join(temp_dir, dll_name + ".def")
+            lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a")
+
+            # Generate .def file
+            contents = [
+                "LIBRARY %s" % os.path.basename(output_filename),
+                "EXPORTS"]
+            for sym in export_symbols:
+                contents.append(sym)
+            self.execute(write_file, (def_file, contents),
+                         "writing %s" % def_file)
+
+            # next add options for def-file and to creating import libraries
+
+            # dllwrap uses different options than gcc/ld
+            if self.linker_dll == "dllwrap":
+                extra_preargs.extend(["--output-lib", lib_file])
+                # for dllwrap we have to use a special option
+                extra_preargs.extend(["--def", def_file])
+            # we use gcc/ld here and can be sure ld is >= 2.9.10
+            else:
+                # doesn't work: bfd_close build\...\libfoo.a: Invalid operation
+                #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file])
+                # for gcc/ld the def-file is specified as any object files
+                objects.append(def_file)
+
+        #end: if ((export_symbols is not None) and
+        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
+
+        # Anyone who wants symbols and a much larger output file
+        # should explicitly switch debug mode on;
+        # otherwise we let dllwrap/ld strip the output file
+        # (On my machine: 10KB < stripped_file < ??100KB
+        #   unstripped_file = stripped_file + XXX KB
+        #  ( XXX=254 for a typical python extension))
+        if not debug:
+            extra_preargs.append("-s")
+
+        UnixCCompiler.link(self,
+                           target_desc,
+                           objects,
+                           output_filename,
+                           output_dir,
+                           libraries,
+                           library_dirs,
+                           runtime_library_dirs,
+                           None, # export_symbols, we do this in our def-file
+                           debug,
+                           extra_preargs,
+                           extra_postargs,
+                           build_temp,
+                           target_lang)
+
+    # link ()
+
+    # -- Miscellaneous methods -----------------------------------------
+
+    # overwrite the one from CCompiler to support rc and res-files
+    def object_filenames (self,
+                          source_filenames,
+                          strip_dir=0,
+                          output_dir=''):
+        if output_dir is None: output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
+            (base, ext) = os.path.splitext (os.path.normcase(src_name))
+            if ext not in (self.src_extensions + ['.rc','.res']):
+                raise UnknownFileError, \
+                      "unknown file type '%s' (from '%s')" % \
+                      (ext, src_name)
+            if strip_dir:
+                base = os.path.basename (base)
+            if ext == '.res' or ext == '.rc':
+                # these need to be compiled to object files
+                obj_names.append (os.path.join (output_dir,
+                                            base + ext + self.obj_extension))
+            else:
+                obj_names.append (os.path.join (output_dir,
+                                            base + self.obj_extension))
+        return obj_names
+
+    # object_filenames ()
+
+# class CygwinCCompiler
+
+
+# the same as cygwin plus some additional parameters
+class Mingw32CCompiler (CygwinCCompiler):
+
+    compiler_type = 'mingw32'
+
+    def __init__ (self,
+                  verbose=0,
+                  dry_run=0,
+                  force=0):
+
+        CygwinCCompiler.__init__ (self, verbose, dry_run, force)
+
+        # ld_version >= "2.13" support -shared so use it instead of
+        # -mdll -static
+        if self.ld_version >= "2.13":
+            shared_option = "-shared"
+        else:
+            shared_option = "-mdll -static"
+
+        # A real mingw32 doesn't need to specify a different entry point,
+        # but cygwin 2.91.57 in no-cygwin-mode needs it.
+        if self.gcc_version <= "2.91.57":
+            entry_point = '--entry _DllMain@12'
+        else:
+            entry_point = ''
+
+        self.set_executables(compiler='gcc -mno-cygwin -O -Wall',
+                             compiler_so='gcc -mno-cygwin -mdll -O -Wall',
+                             compiler_cxx='g++ -mno-cygwin -O -Wall',
+                             linker_exe='gcc -mno-cygwin',
+                             linker_so='%s -mno-cygwin %s %s'
+                                        % (self.linker_dll, shared_option,
+                                           entry_point))
+        # Maybe we should also append -mthreads, but then the finished
+        # dlls need another dll (mingwm10.dll see Mingw32 docs)
+        # (-mthreads: Support thread-safe exception handling on `Mingw32')
+
+        # no additional libraries needed
+        self.dll_libraries=[]
+
+        # Include the appropriate MSVC runtime library if Python was built
+        # with MSVC 7.0 or later.
+        self.dll_libraries = get_msvcr()
+
+    # __init__ ()
+
+# class Mingw32CCompiler
+
+# Because these compilers aren't configured in Python's pyconfig.h file by
+# default, we should at least warn the user if they are using an unmodified
+# version.
+
+CONFIG_H_OK = "ok"
+CONFIG_H_NOTOK = "not ok"
+CONFIG_H_UNCERTAIN = "uncertain"
+
+def check_config_h():
+
+    """Check if the current Python installation (specifically, pyconfig.h)
+    appears amenable to building extensions with GCC.  Returns a tuple
+    (status, details), where 'status' is one of the following constants:
+      CONFIG_H_OK
+        all is well, go ahead and compile
+      CONFIG_H_NOTOK
+        doesn't look good
+      CONFIG_H_UNCERTAIN
+        not sure -- unable to read pyconfig.h
+    'details' is a human-readable string explaining the situation.
+
+    Note there are two ways to conclude "OK": either 'sys.version' contains
+    the string "GCC" (implying that this Python was built with GCC), or the
+    installed "pyconfig.h" contains the string "__GNUC__".
+    """
+
+    # XXX since this function also checks sys.version, it's not strictly a
+    # "pyconfig.h" check -- should probably be renamed...
+
+    from distutils import sysconfig
+    import string
+    # if sys.version contains GCC then python was compiled with
+    # GCC, and the pyconfig.h file should be OK
+    if string.find(sys.version,"GCC") >= 0:
+        return (CONFIG_H_OK, "sys.version mentions 'GCC'")
+
+    fn = sysconfig.get_config_h_filename()
+    try:
+        # It would probably better to read single lines to search.
+        # But we do this only once, and it is fast enough
+        f = open(fn)
+        try:
+            s = f.read()
+        finally:
+            f.close()
+
+    except IOError, exc:
+        # if we can't read this file, we cannot say it is wrong
+        # the compiler will complain later about this file as missing
+        return (CONFIG_H_UNCERTAIN,
+                "couldn't read '%s': %s" % (fn, exc.strerror))
+
+    else:
+        # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
+        if string.find(s,"__GNUC__") >= 0:
+            return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
+        else:
+            return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)
+
+
+
+def get_versions():
+    """ Try to find out the versions of gcc, ld and dllwrap.
+        If a version cannot be determined, None is returned for it.
+    """
+    from distutils.version import LooseVersion
+    from distutils.spawn import find_executable
+    import re
+
+    gcc_exe = find_executable('gcc')
+    if gcc_exe:
+        out = os.popen(gcc_exe + ' -dumpversion','r')
+        out_string = out.read()
+        out.close()
+        result = re.search('(\d+\.\d+(\.\d+)*)',out_string)
+        if result:
+            gcc_version = LooseVersion(result.group(1))
+        else:
+            gcc_version = None
+    else:
+        gcc_version = None
+    ld_exe = find_executable('ld')
+    if ld_exe:
+        out = os.popen(ld_exe + ' -v','r')
+        out_string = out.read()
+        out.close()
+        result = re.search('(\d+\.\d+(\.\d+)*)',out_string)
+        if result:
+            ld_version = LooseVersion(result.group(1))
+        else:
+            ld_version = None
+    else:
+        ld_version = None
+    dllwrap_exe = find_executable('dllwrap')
+    if dllwrap_exe:
+        out = os.popen(dllwrap_exe + ' --version','r')
+        out_string = out.read()
+        out.close()
+        result = re.search(' (\d+\.\d+(\.\d+)*)',out_string)
+        if result:
+            dllwrap_version = LooseVersion(result.group(1))
+        else:
+            dllwrap_version = None
+    else:
+        dllwrap_version = None
+    return (gcc_version, ld_version, dllwrap_version)
diff --git a/lib/distutils/distutils/debug.py b/lib/distutils/distutils/debug.py
new file mode 100644
index 0000000..2886744
--- /dev/null
+++ b/lib/distutils/distutils/debug.py
@@ -0,0 +1,7 @@
+import os
+
+__revision__ = "$Id$"
+
+# If DISTUTILS_DEBUG is anything other than the empty string, we run in
+# debug mode.
+DEBUG = os.environ.get('DISTUTILS_DEBUG')
diff --git a/lib/distutils/distutils/dep_util.py b/lib/distutils/distutils/dep_util.py
new file mode 100644
index 0000000..4e40df6
--- /dev/null
+++ b/lib/distutils/distutils/dep_util.py
@@ -0,0 +1,88 @@
+"""distutils.dep_util
+
+Utility functions for simple, timestamp-based dependency of files
+and groups of files; also, functions based entirely on such
+timestamp dependency analysis."""
+
+__revision__ = "$Id$"
+
+import os
+from distutils.errors import DistutilsFileError
+
+def newer(source, target):
+    """Tells if the target is newer than the source.
+
+    Return true if 'source' exists and is more recently modified than
+    'target', or if 'source' exists and 'target' doesn't.
+
+    Return false if both exist and 'target' is the same age or younger
+    than 'source'. Raise DistutilsFileError if 'source' does not exist.
+
+    Note that this test is not very accurate: files created in the same second
+    will have the same "age".
+    """
+    if not os.path.exists(source):
+        raise DistutilsFileError("file '%s' does not exist" %
+                                 os.path.abspath(source))
+    if not os.path.exists(target):
+        return True
+
+    return os.stat(source).st_mtime > os.stat(target).st_mtime
+
+def newer_pairwise(sources, targets):
+    """Walk two filename lists in parallel, testing if each source is newer
+    than its corresponding target.  Return a pair of lists (sources,
+    targets) where source is newer than target, according to the semantics
+    of 'newer()'.
+    """
+    if len(sources) != len(targets):
+        raise ValueError, "'sources' and 'targets' must be same length"
+
+    # build a pair of lists (sources, targets) where  source is newer
+    n_sources = []
+    n_targets = []
+    for source, target in zip(sources, targets):
+        if newer(source, target):
+            n_sources.append(source)
+            n_targets.append(target)
+
+    return n_sources, n_targets
+
+def newer_group(sources, target, missing='error'):
+    """Return true if 'target' is out-of-date with respect to any file
+    listed in 'sources'.
+
+    In other words, if 'target' exists and is newer
+    than every file in 'sources', return false; otherwise return true.
+    'missing' controls what we do when a source file is missing; the
+    default ("error") is to blow up with an OSError from inside 'stat()';
+    if it is "ignore", we silently drop any missing source files; if it is
+    "newer", any missing source files make us assume that 'target' is
+    out-of-date (this is handy in "dry-run" mode: it'll make you pretend to
+    carry out commands that wouldn't work because inputs are missing, but
+    that doesn't matter because you're not actually going to run the
+    commands).
+    """
+    # If the target doesn't even exist, then it's definitely out-of-date.
+    if not os.path.exists(target):
+        return True
+
+    # Otherwise we have to find out the hard way: if *any* source file
+    # is more recent than 'target', then 'target' is out-of-date and
+    # we can immediately return true.  If we fall through to the end
+    # of the loop, then 'target' is up-to-date and we return false.
+    target_mtime = os.stat(target).st_mtime
+
+    for source in sources:
+        if not os.path.exists(source):
+            if missing == 'error':      # blow up when we stat() the file
+                pass
+            elif missing == 'ignore':   # missing source dropped from
+                continue                #  target's dependency list
+            elif missing == 'newer':    # missing source means target is
+                return True             #  out-of-date
+
+        if os.stat(source).st_mtime > target_mtime:
+            return True
+
+    return False
diff --git a/lib/distutils/distutils/dir_util.py b/lib/distutils/distutils/dir_util.py
new file mode 100644
index 0000000..9c5cf33
--- /dev/null
+++ b/lib/distutils/distutils/dir_util.py
@@ -0,0 +1,212 @@
+"""distutils.dir_util
+
+Utility functions for manipulating directories and directory trees."""
+
+__revision__ = "$Id$"
+
+import os
+import errno
+from distutils.errors import DistutilsFileError, DistutilsInternalError
+from distutils import log
+
+# cache used by mkpath() -- in addition to cheapening redundant calls,
+# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
+_path_created = {}
+
+# I don't use os.makedirs because a) it's new to Python 1.5.2, and
+# b) it blows up if the directory already exists (I want to silently
+# succeed in that case).
def mkpath(name, mode=0777, verbose=1, dry_run=0):
    """Create a directory and any missing ancestor directories.

    If the directory already exists (or if 'name' is the empty string, which
    means the current directory, which of course exists), then do nothing.
    Raise DistutilsFileError if unable to create some directory along the way
    (eg. some sub-path exists, but is a file rather than a directory).
    If 'verbose' is true, print a one-line summary of each mkdir to stdout.
    Return the list of directories actually created.

    Directories handled by an earlier call are remembered in the
    module-global '_path_created' cache, so they are neither re-announced
    nor re-created; note that the cache is updated even in dry-run mode,
    although nothing is created then.
    """

    global _path_created

    # Detect a common bug -- name is None
    if not isinstance(name, basestring):
        raise DistutilsInternalError, \
              "mkpath: 'name' must be a string (got %r)" % (name,)

    # XXX what's the better way to handle verbosity? print as we create
    # each directory in the path (the current behaviour), or only announce
    # the creation of the whole path? (quite easy to do the latter since
    # we're not using a recursive algorithm)

    name = os.path.normpath(name)
    created_dirs = []
    # Nothing to do if the directory already exists, or was handled by an
    # earlier mkpath() call.
    if os.path.isdir(name) or name == '':
        return created_dirs
    if _path_created.get(os.path.abspath(name)):
        return created_dirs

    # Walk upward from 'name' until we hit an existing directory,
    # collecting (in order) the components we will have to create.
    (head, tail) = os.path.split(name)
    tails = [tail]                      # stack of lone dirs to create

    while head and tail and not os.path.isdir(head):
        (head, tail) = os.path.split(head)
        tails.insert(0, tail)          # push next higher dir onto stack

    # now 'head' contains the deepest directory that already exists
    # (that is, the child of 'head' in 'name' is the highest directory
    # that does *not* exist)
    for d in tails:
        head = os.path.join(head, d)
        abs_head = os.path.abspath(head)

        if _path_created.get(abs_head):
            continue

        if verbose >= 1:
            log.info("creating %s", head)

        if not dry_run:
            try:
                os.mkdir(head, mode)
            except OSError, exc:
                # EEXIST is tolerated as long as the path really is a
                # directory (e.g. it appeared between the isdir() check
                # above and the mkdir()); anything else is fatal.
                if not (exc.errno == errno.EEXIST and os.path.isdir(head)):
                    raise DistutilsFileError(
                          "could not create '%s': %s" % (head, exc.args[-1]))
            created_dirs.append(head)

        _path_created[abs_head] = 1
    return created_dirs
+
+def create_tree(base_dir, files, mode=0777, verbose=1, dry_run=0):
+    """Create all the empty directories under 'base_dir' needed to put 'files'
+    there.
+
+    'base_dir' is just the a name of a directory which doesn't necessarily
+    exist yet; 'files' is a list of filenames to be interpreted relative to
+    'base_dir'.  'base_dir' + the directory portion of every file in 'files'
+    will be created if it doesn't already exist.  'mode', 'verbose' and
+    'dry_run' flags are as for 'mkpath()'.
+    """
+    # First get the list of directories to create
+    need_dir = {}
+    for file in files:
+        need_dir[os.path.join(base_dir, os.path.dirname(file))] = 1
+    need_dirs = need_dir.keys()
+    need_dirs.sort()
+
+    # Now create them
+    for dir in need_dirs:
+        mkpath(dir, mode, verbose=verbose, dry_run=dry_run)
+
+def copy_tree(src, dst, preserve_mode=1, preserve_times=1,
+              preserve_symlinks=0, update=0, verbose=1, dry_run=0):
+    """Copy an entire directory tree 'src' to a new location 'dst'.
+
+    Both 'src' and 'dst' must be directory names.  If 'src' is not a
+    directory, raise DistutilsFileError.  If 'dst' does not exist, it is
+    created with 'mkpath()'.  The end result of the copy is that every
+    file in 'src' is copied to 'dst', and directories under 'src' are
+    recursively copied to 'dst'.  Return the list of files that were
+    copied or might have been copied, using their output name.  The
+    return value is unaffected by 'update' or 'dry_run': it is simply
+    the list of all files under 'src', with the names changed to be
+    under 'dst'.
+
+    'preserve_mode' and 'preserve_times' are the same as for
+    'copy_file'; note that they only apply to regular files, not to
+    directories.  If 'preserve_symlinks' is true, symlinks will be
+    copied as symlinks (on platforms that support them!); otherwise
+    (the default), the destination of the symlink will be copied.
+    'update' and 'verbose' are the same as for 'copy_file'.
+    """
+    from distutils.file_util import copy_file
+
+    if not dry_run and not os.path.isdir(src):
+        raise DistutilsFileError, \
+              "cannot copy tree '%s': not a directory" % src
+    try:
+        names = os.listdir(src)
+    except os.error, (errno, errstr):
+        if dry_run:
+            names = []
+        else:
+            raise DistutilsFileError, \
+                  "error listing files in '%s': %s" % (src, errstr)
+
+    if not dry_run:
+        mkpath(dst, verbose=verbose)
+
+    outputs = []
+
+    for n in names:
+        src_name = os.path.join(src, n)
+        dst_name = os.path.join(dst, n)
+
+        if preserve_symlinks and os.path.islink(src_name):
+            link_dest = os.readlink(src_name)
+            if verbose >= 1:
+                log.info("linking %s -> %s", dst_name, link_dest)
+            if not dry_run:
+                os.symlink(link_dest, dst_name)
+            outputs.append(dst_name)
+
+        elif os.path.isdir(src_name):
+            outputs.extend(
+                copy_tree(src_name, dst_name, preserve_mode,
+                          preserve_times, preserve_symlinks, update,
+                          verbose=verbose, dry_run=dry_run))
+        else:
+            copy_file(src_name, dst_name, preserve_mode,
+                      preserve_times, update, verbose=verbose,
+                      dry_run=dry_run)
+            outputs.append(dst_name)
+
+    return outputs
+
+def _build_cmdtuple(path, cmdtuples):
+    """Helper for remove_tree()."""
+    for f in os.listdir(path):
+        real_f = os.path.join(path,f)
+        if os.path.isdir(real_f) and not os.path.islink(real_f):
+            _build_cmdtuple(real_f, cmdtuples)
+        else:
+            cmdtuples.append((os.remove, real_f))
+    cmdtuples.append((os.rmdir, path))
+
+def remove_tree(directory, verbose=1, dry_run=0):
+    """Recursively remove an entire directory tree.
+
+    Any errors are ignored (apart from being reported to stdout if 'verbose'
+    is true).
+    """
+    from distutils.util import grok_environment_error
+    global _path_created
+
+    if verbose >= 1:
+        log.info("removing '%s' (and everything under it)", directory)
+    if dry_run:
+        return
+    cmdtuples = []
+    _build_cmdtuple(directory, cmdtuples)
+    for cmd in cmdtuples:
+        try:
+            cmd[0](cmd[1])
+            # remove dir from cache if it's already there
+            abspath = os.path.abspath(cmd[1])
+            if abspath in _path_created:
+                del _path_created[abspath]
+        except (IOError, OSError), exc:
+            log.warn(grok_environment_error(
+                    exc, "error removing %s: " % directory))
+
def ensure_relative(path):
    """Return 'path' with any leading separator stripped (keeping the
    drive letter, if present), so it is relative and can be used as the
    second argument to os.path.join().
    """
    drive, tail = os.path.splitdrive(path)
    if tail.startswith(os.sep):
        tail = tail[1:]
    return drive + tail
diff --git a/lib/distutils/distutils/dist.py b/lib/distutils/distutils/dist.py
new file mode 100644
index 0000000..597909e
--- /dev/null
+++ b/lib/distutils/distutils/dist.py
@@ -0,0 +1,1248 @@
+"""distutils.dist
+
+Provides the Distribution class, which represents the module distribution
+being built/installed/distributed.
+"""
+
+__revision__ = "$Id$"
+
+import sys, os, re
+from email import message_from_file
+
+try:
+    import warnings
+except ImportError:
+    warnings = None
+
+from distutils.errors import (DistutilsOptionError, DistutilsArgError,
+                              DistutilsModuleError, DistutilsClassError)
+from distutils.fancy_getopt import FancyGetopt, translate_longopt
+from distutils.util import check_environ, strtobool, rfc822_escape
+from distutils import log
+from distutils.debug import DEBUG
+
+# Encoding used for the PKG-INFO files
+PKG_INFO_ENCODING = 'utf-8'
+
+# Regex to define acceptable Distutils command names.  This is not *quite*
+# the same as a Python NAME -- I don't allow leading underscores.  The fact
+# that they're very similar is no coincidence; the default naming scheme is
+# to look for a Python module named after the command.
+command_re = re.compile (r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
+
+
+class Distribution:
+    """The core of the Distutils.  Most of the work hiding behind 'setup'
+    is really done within a Distribution instance, which farms the work out
+    to the Distutils commands specified on the command line.
+
+    Setup scripts will almost never instantiate Distribution directly,
+    unless the 'setup()' function is totally inadequate to their needs.
+    However, it is conceivable that a setup script might wish to subclass
+    Distribution for some specialized purpose, and then pass the subclass
+    to 'setup()' as the 'distclass' keyword argument.  If so, it is
+    necessary to respect the expectations that 'setup' has of Distribution.
+    See the code for 'setup()', in core.py, for details.
+    """
+
+
    # 'global_options' describes the command-line options that may be
    # supplied to the setup script prior to any actual commands.
    # Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of
    # these global options.  This list should be kept to a bare minimum,
    # since every global option is also valid as a command option -- and we
    # don't want to pollute the commands with too many options that they
    # have minimal control over.
    # The fourth entry for verbose means that it can be repeated.
    global_options = [('verbose', 'v', "run verbosely (default)", 1),
                      ('quiet', 'q', "run quietly (turns verbosity off)"),
                      ('dry-run', 'n', "don't actually do anything"),
                      ('help', 'h', "show detailed help message"),
                      ('no-user-cfg', None,
                       'ignore pydistutils.cfg in your home directory'),
    ]

    # 'common_usage' is a short (2-3 line) string describing the common
    # usage of the setup script.
    common_usage = """\
Common commands: (see '--help-commands' for more)

  setup.py build      will build the package underneath 'build/'
  setup.py install    will install the package
"""

    # options that are not propagated to the commands
    display_options = [
        ('help-commands', None,
         "list all available commands"),
        ('name', None,
         "print package name"),
        ('version', 'V',
         "print package version"),
        ('fullname', None,
         "print <package name>-<version>"),
        ('author', None,
         "print the author's name"),
        ('author-email', None,
         "print the author's email address"),
        ('maintainer', None,
         "print the maintainer's name"),
        ('maintainer-email', None,
         "print the maintainer's email address"),
        ('contact', None,
         "print the maintainer's name if known, else the author's"),
        ('contact-email', None,
         "print the maintainer's email address if known, else the author's"),
        ('url', None,
         "print the URL for this package"),
        ('license', None,
         "print the license of the package"),
        ('licence', None,
         "alias for --license"),
        ('description', None,
         "print the package description"),
        ('long-description', None,
         "print the long package description"),
        ('platforms', None,
         "print the list of platforms"),
        ('classifiers', None,
         "print the list of classifiers"),
        ('keywords', None,
         "print the list of keywords"),
        ('provides', None,
         "print the list of packages/modules provided"),
        ('requires', None,
         "print the list of packages/modules required"),
        ('obsoletes', None,
         "print the list of packages/modules made obsolete")
        ]

    # The long option names above, translated to attribute names
    # ('-' becomes '_'); __init__ initializes each of these attributes to 0.
    display_option_names = map(lambda x: translate_longopt(x[0]),
                               display_options)

    # negative options are options that exclude other options
    negative_opt = {'quiet': 'verbose'}
+
+
+    # -- Creation/initialization methods -------------------------------
+
    def __init__ (self, attrs=None):
        """Construct a new Distribution instance: initialize all the
        attributes of a Distribution, and then use 'attrs' (a dictionary
        mapping attribute names to values) to assign some of those
        attributes their "real" values.  (Any attributes not mentioned in
        'attrs' will be assigned to some null value: 0, None, an empty list
        or dictionary, etc.)  Most importantly, initialize the
        'command_obj' attribute to the empty dictionary; this will be
        filled in with real command objects by 'parse_command_line()'.
        """

        # Default values for our command-line options
        self.verbose = 1
        self.dry_run = 0
        self.help = 0
        # All display options ('--name', '--version', ...) default to off.
        for attr in self.display_option_names:
            setattr(self, attr, 0)

        # Store the distribution meta-data (name, version, author, and so
        # forth) in a separate object -- we're getting to have enough
        # information here (and enough command-line options) that it's
        # worth it.  Also delegate 'get_XXX()' methods to the 'metadata'
        # object in a sneaky and underhanded (but efficient!) way.
        self.metadata = DistributionMetadata()
        for basename in self.metadata._METHOD_BASENAMES:
            method_name = "get_" + basename
            setattr(self, method_name, getattr(self.metadata, method_name))

        # 'cmdclass' maps command names to class objects, so we
        # can 1) quickly figure out which class to instantiate when
        # we need to create a new command object, and 2) have a way
        # for the setup script to override command classes
        self.cmdclass = {}

        # 'command_packages' is a list of packages in which commands
        # are searched for.  The factory for command 'foo' is expected
        # to be named 'foo' in the module 'foo' in one of the packages
        # named here.  This list is searched from the left; an error
        # is raised if no named package provides the command being
        # searched for.  (Always access using get_command_packages().)
        self.command_packages = None

        # 'script_name' and 'script_args' are usually set to sys.argv[0]
        # and sys.argv[1:], but they can be overridden when the caller is
        # not necessarily a setup script run from the command-line.
        self.script_name = None
        self.script_args = None

        # 'command_options' is where we store command options between
        # parsing them (from config files, the command-line, etc.) and when
        # they are actually needed -- ie. when the command in question is
        # instantiated.  It is a dictionary of dictionaries of 2-tuples:
        #   command_options = { command_name : { option : (source, value) } }
        self.command_options = {}

        # 'dist_files' is the list of (command, pyversion, file) that
        # have been created by any dist commands run so far. This is
        # filled regardless of whether the run is dry or not. pyversion
        # gives sysconfig.get_python_version() if the dist file is
        # specific to a Python version, 'any' if it is good for all
        # Python versions on the target platform, and '' for a source
        # file. pyversion should not be used to specify minimum or
        # maximum required Python versions; use the metainfo for that
        # instead.
        self.dist_files = []

        # These options are really the business of various commands, rather
        # than of the Distribution itself.  We provide aliases for them in
        # Distribution as a convenience to the developer.
        self.packages = None
        self.package_data = {}
        self.package_dir = None
        self.py_modules = None
        self.libraries = None
        self.headers = None
        self.ext_modules = None
        self.ext_package = None
        self.include_dirs = None
        self.extra_path = None
        self.scripts = None
        self.data_files = None
        self.password = ''

        # And now initialize bookkeeping stuff that can't be supplied by
        # the caller at all.  'command_obj' maps command names to
        # Command instances -- that's how we enforce that every command
        # class is a singleton.
        self.command_obj = {}

        # 'have_run' maps command names to boolean values; it keeps track
        # of whether we have actually run a particular command, to make it
        # cheap to "run" a command whenever we think we might need to -- if
        # it's already been done, no need for expensive filesystem
        # operations, we just check the 'have_run' dictionary and carry on.
        # It's only safe to query 'have_run' for a command class that has
        # been instantiated -- a false value will be inserted when the
        # command object is created, and replaced with a true value when
        # the command is successfully run.  Thus it's probably best to use
        # '.get()' rather than a straight lookup.
        self.have_run = {}

        # Now we'll use the attrs dictionary (ultimately, keyword args from
        # the setup script) to possibly override any or all of these
        # distribution options.

        if attrs:
            # Pull out the set of command options and work on them
            # specifically.  Note that this order guarantees that aliased
            # command options will override any supplied redundantly
            # through the general options dictionary.
            options = attrs.get('options')
            if options is not None:
                del attrs['options']
                for (command, cmd_options) in options.items():
                    opt_dict = self.get_option_dict(command)
                    for (opt, val) in cmd_options.items():
                        opt_dict[opt] = ("setup script", val)

            # 'licence' is a deprecated British-spelling alias for
            # 'license'; fold it in before the generic attribute loop.
            if 'licence' in attrs:
                attrs['license'] = attrs['licence']
                del attrs['licence']
                msg = "'licence' distribution option is deprecated; use 'license'"
                # 'warnings' may be None if the import at module top failed.
                if warnings is not None:
                    warnings.warn(msg)
                else:
                    sys.stderr.write(msg + "\n")

            # Now work on the rest of the attributes.  Any attribute that's
            # not already defined is invalid!
            for (key, val) in attrs.items():
                if hasattr(self.metadata, "set_" + key):
                    getattr(self.metadata, "set_" + key)(val)
                elif hasattr(self.metadata, key):
                    setattr(self.metadata, key, val)
                elif hasattr(self, key):
                    setattr(self, key, val)
                else:
                    msg = "Unknown distribution option: %s" % repr(key)
                    if warnings is not None:
                        warnings.warn(msg)
                    else:
                        sys.stderr.write(msg + "\n")

        # no-user-cfg is handled before other command line args
        # because other args override the config files, and this
        # one is needed before we can load the config files.
        # If attrs['script_args'] wasn't passed, assume false.
        #
        # This also make sure we just look at the global options
        self.want_user_cfg = True

        if self.script_args is not None:
            # Scan only the leading option arguments; the first non-option
            # argument (a command name) ends the global-option section.
            for arg in self.script_args:
                if not arg.startswith('-'):
                    break
                if arg == '--no-user-cfg':
                    self.want_user_cfg = False
                    break

        self.finalize_options()
+
+    def get_option_dict(self, command):
+        """Get the option dictionary for a given command.  If that
+        command's option dictionary hasn't been created yet, then create it
+        and return the new dictionary; otherwise, return the existing
+        option dictionary.
+        """
+        dict = self.command_options.get(command)
+        if dict is None:
+            dict = self.command_options[command] = {}
+        return dict
+
+    def dump_option_dicts(self, header=None, commands=None, indent=""):
+        from pprint import pformat
+
+        if commands is None:             # dump all command option dicts
+            commands = self.command_options.keys()
+            commands.sort()
+
+        if header is not None:
+            self.announce(indent + header)
+            indent = indent + "  "
+
+        if not commands:
+            self.announce(indent + "no commands known yet")
+            return
+
+        for cmd_name in commands:
+            opt_dict = self.command_options.get(cmd_name)
+            if opt_dict is None:
+                self.announce(indent +
+                              "no option dict for '%s' command" % cmd_name)
+            else:
+                self.announce(indent +
+                              "option dict for '%s' command:" % cmd_name)
+                out = pformat(opt_dict)
+                for line in out.split('\n'):
+                    self.announce(indent + "  " + line)
+
+    # -- Config file finding/parsing methods ---------------------------
+
+    def find_config_files(self):
+        """Find as many configuration files as should be processed for this
+        platform, and return a list of filenames in the order in which they
+        should be parsed.  The filenames returned are guaranteed to exist
+        (modulo nasty race conditions).
+
+        There are three possible config files: distutils.cfg in the
+        Distutils installation directory (ie. where the top-level
+        Distutils __inst__.py file lives), a file in the user's home
+        directory named .pydistutils.cfg on Unix and pydistutils.cfg
+        on Windows/Mac; and setup.cfg in the current directory.
+
+        The file in the user's home directory can be disabled with the
+        --no-user-cfg option.
+        """
+        files = []
+        check_environ()
+
+        # Where to look for the system-wide Distutils config file
+        sys_dir = os.path.dirname(sys.modules['distutils'].__file__)
+
+        # Look for the system config file
+        sys_file = os.path.join(sys_dir, "distutils.cfg")
+        if os.path.isfile(sys_file):
+            files.append(sys_file)
+
+        # What to call the per-user config file
+        if os.name == 'posix':
+            user_filename = ".pydistutils.cfg"
+        else:
+            user_filename = "pydistutils.cfg"
+
+        # And look for the user config file
+        if self.want_user_cfg:
+            user_file = os.path.join(os.path.expanduser('~'), user_filename)
+            if os.path.isfile(user_file):
+                files.append(user_file)
+
+        # All platforms support local setup.cfg
+        local_file = "setup.cfg"
+        if os.path.isfile(local_file):
+            files.append(local_file)
+
+        if DEBUG:
+            self.announce("using config files: %s" % ', '.join(files))
+
+        return files
+
    def parse_config_files(self, filenames=None):
        """Parse 'filenames' (default: the list from 'find_config_files()')
        and record every option found in 'self.command_options' as
        (filename, value) pairs keyed by section and option name.  Options
        from a [global] section additionally set the corresponding
        Distribution attributes; ValueError from a bad boolean is re-raised
        as DistutilsOptionError.
        """
        from ConfigParser import ConfigParser

        if filenames is None:
            filenames = self.find_config_files()

        if DEBUG:
            self.announce("Distribution.parse_config_files():")

        parser = ConfigParser()
        for filename in filenames:
            if DEBUG:
                self.announce("  reading %s" % filename)
            parser.read(filename)
            for section in parser.sections():
                options = parser.options(section)
                opt_dict = self.get_option_dict(section)

                for opt in options:
                    if opt != '__name__':
                        val = parser.get(section,opt)
                        # Option names use '-' in config files but '_'
                        # internally.
                        opt = opt.replace('-', '_')
                        opt_dict[opt] = (filename, val)

            # Make the ConfigParser forget everything (so we retain
            # the original filenames that options come from)
            parser.__init__()

        # If there was a "global" section in the config file, use it
        # to set Distribution options.

        if 'global' in self.command_options:
            for (opt, (src, val)) in self.command_options['global'].items():
                alias = self.negative_opt.get(opt)
                try:
                    if alias:
                        # A negative alias toggles the aliased option off
                        # (e.g. 'quiet' flips 'verbose').
                        setattr(self, alias, not strtobool(val))
                    elif opt in ('verbose', 'dry_run'): # ugh!
                        setattr(self, opt, strtobool(val))
                    else:
                        setattr(self, opt, val)
                except ValueError, msg:
                    raise DistutilsOptionError, msg
+
+    # -- Command-line parsing methods ----------------------------------
+
    def parse_command_line(self):
        """Parse the setup script's command line, taken from the
        'script_args' instance attribute (which defaults to 'sys.argv[1:]'
        -- see 'setup()' in core.py).  This list is first processed for
        "global options" -- options that set attributes of the Distribution
        instance.  Then, it is alternately scanned for Distutils commands
        and options for that command.  Each new command terminates the
        options for the previous command.  The allowed options for a
        command are determined by the 'user_options' attribute of the
        command class -- thus, we have to be able to load command classes
        in order to parse the command line.  Any error in that 'options'
        attribute raises DistutilsGetoptError; any error on the
        command-line raises DistutilsArgError.  If no Distutils commands
        were found on the command line, raises DistutilsArgError.  Return
        true if command-line was successfully parsed and we should carry
        on with executing commands; false if no errors but we shouldn't
        execute commands (currently, this only happens if user asks for
        help).
        """
        #
        # We now have enough information to show the Macintosh dialog
        # that allows the user to interactively specify the "command line".
        #
        toplevel_options = self._get_toplevel_options()

        # We have to parse the command line a bit at a time -- global
        # options, then the first command, then its options, and so on --
        # because each command will be handled by a different class, and
        # the options that are valid for a particular class aren't known
        # until we have loaded the command class, which doesn't happen
        # until we know what the command is.

        self.commands = []
        parser = FancyGetopt(toplevel_options + self.display_options)
        parser.set_negative_aliases(self.negative_opt)
        # 'licence' is accepted as a British-spelling alias for 'license'.
        parser.set_aliases({'licence': 'license'})
        args = parser.getopt(args=self.script_args, object=self)
        option_order = parser.get_option_order()
        # getopt() has stored global -v/-q into self.verbose by now.
        log.set_verbosity(self.verbose)

        # for display options we return immediately
        if self.handle_display_options(option_order):
            return
        # Consume remaining args one command at a time; each call parses
        # one command name plus that command's own options.
        while args:
            args = self._parse_command_opts(parser, args)
            if args is None:            # user asked for help (and got it)
                return

        # Handle the cases of --help as a "global" option, ie.
        # "setup.py --help" and "setup.py --help command ...".  For the
        # former, we show global options (--verbose, --dry-run, etc.)
        # and display-only options (--name, --version, etc.); for the
        # latter, we omit the display-only options and show help for
        # each command listed on the command line.
        if self.help:
            self._show_help(parser,
                            display_options=len(self.commands) == 0,
                            commands=self.commands)
            return

        # Oops, no commands found -- an end-user error
        if not self.commands:
            raise DistutilsArgError, "no commands supplied"

        # All is well: return true
        return 1
+
+    def _get_toplevel_options(self):
+        """Return the non-display options recognized at the top level.
+
+        This includes options that are recognized *only* at the top
+        level as well as options recognized for commands.
+        """
+        return self.global_options + [
+            ("command-packages=", None,
+             "list of packages that provide distutils commands"),
+            ]
+
    def _parse_command_opts(self, parser, args):
        """Parse the command-line options for a single command.
        'parser' must be a FancyGetopt instance; 'args' must be the list
        of arguments, starting with the current command (whose options
        we are about to parse).  Returns a new version of 'args' with
        the next command at the front of the list; will be the empty
        list if there are no more commands on the command line.  Returns
        None if the user asked for help on this command.

        Raises SystemExit for a malformed command name, DistutilsArgError
        when the command module cannot be loaded, and DistutilsClassError
        when the command class does not honour the Command interface.
        """
        # late import because of mutual dependence between these modules
        from distutils.cmd import Command

        # Pull the current command from the head of the command line
        command = args[0]
        if not command_re.match(command):
            raise SystemExit, "invalid command name '%s'" % command
        self.commands.append(command)

        # Dig up the command class that implements this command, so we
        # 1) know that it's a valid command, and 2) know which options
        # it takes.
        try:
            cmd_class = self.get_command_class(command)
        except DistutilsModuleError, msg:
            raise DistutilsArgError, msg

        # Require that the command class be derived from Command -- want
        # to be sure that the basic "command" interface is implemented.
        if not issubclass(cmd_class, Command):
            raise DistutilsClassError, \
                  "command class %s must subclass Command" % cmd_class

        # Also make sure that the command object provides a list of its
        # known options.
        if not (hasattr(cmd_class, 'user_options') and
                isinstance(cmd_class.user_options, list)):
            raise DistutilsClassError, \
                  ("command class %s must provide " +
                   "'user_options' attribute (a list of tuples)") % \
                  cmd_class

        # If the command class has a list of negative alias options,
        # merge it in with the global negative aliases.
        negative_opt = self.negative_opt
        if hasattr(cmd_class, 'negative_opt'):
            # Copy first so class-level aliases never leak into the
            # shared global table.
            negative_opt = negative_opt.copy()
            negative_opt.update(cmd_class.negative_opt)

        # Check for help_options in command class.  They have a different
        # format (tuple of four) so we need to preprocess them here.
        if (hasattr(cmd_class, 'help_options') and
            isinstance(cmd_class.help_options, list)):
            help_options = fix_help_options(cmd_class.help_options)
        else:
            help_options = []


        # All commands support the global options too, just by adding
        # in 'global_options'.
        parser.set_option_table(self.global_options +
                                cmd_class.user_options +
                                help_options)
        parser.set_negative_aliases(negative_opt)
        (args, opts) = parser.getopt(args[1:])
        if hasattr(opts, 'help') and opts.help:
            self._show_help(parser, display_options=0, commands=[cmd_class])
            return

        if (hasattr(cmd_class, 'help_options') and
            isinstance(cmd_class.help_options, list)):
            help_option_found=0
            for (help_option, short, desc, func) in cmd_class.help_options:
                if hasattr(opts, parser.get_attr_name(help_option)):
                    help_option_found=1
                    # The 4th element of a help option must be a callable
                    # that prints the requested help text.
                    if hasattr(func, '__call__'):
                        func()
                    else:
                        raise DistutilsClassError(
                            "invalid help function %r for help option '%s': "
                            "must be a callable object (function, etc.)"
                            % (func, help_option))

            if help_option_found:
                # Help was displayed; stop processing this command.
                return

        # Put the options from the command-line into their official
        # holding pen, the 'command_options' dictionary.
        opt_dict = self.get_option_dict(command)
        for (name, value) in vars(opts).items():
            opt_dict[name] = ("command line", value)

        return args
+
+    def finalize_options(self):
+        """Set final values for all the options on the Distribution
+        instance, analogous to the .finalize_options() method of Command
+        objects.
+        """
+        for attr in ('keywords', 'platforms'):
+            value = getattr(self.metadata, attr)
+            if value is None:
+                continue
+            if isinstance(value, str):
+                value = [elm.strip() for elm in value.split(',')]
+                setattr(self.metadata, attr, value)
+
    def _show_help(self, parser, global_options=1, display_options=1,
                   commands=[]):
        """Show help for the setup script command-line in the form of
        several lists of command-line options.  'parser' should be a
        FancyGetopt instance; do not expect it to be returned in the
        same state, as its option table will be reset to make it
        generate the correct help text.

        If 'global_options' is true, lists the global options:
        --verbose, --dry-run, etc.  If 'display_options' is true, lists
        the "display-only" options: --name, --version, etc.  Finally,
        lists per-command help for every command name or command class
        in 'commands'.

        NOTE(review): the 'commands' parameter is never referenced in the
        body -- the loop below iterates self.commands instead.  This
        matches upstream distutils; confirm before changing either side.
        (The mutable [] default is harmless since it is never used.)
        """
        # late import because of mutual dependence between these modules
        from distutils.core import gen_usage
        from distutils.cmd import Command

        if global_options:
            if display_options:
                # Include the display options' umbrella entry as well.
                options = self._get_toplevel_options()
            else:
                options = self.global_options
            parser.set_option_table(options)
            parser.print_help(self.common_usage + "\nGlobal options:")
            print('')

        if display_options:
            parser.set_option_table(self.display_options)
            parser.print_help(
                "Information display options (just display " +
                "information, ignore any commands)")
            print('')

        for command in self.commands:
            # Entries may be either command names or Command subclasses.
            if isinstance(command, type) and issubclass(command, Command):
                klass = command
            else:
                klass = self.get_command_class(command)
            if (hasattr(klass, 'help_options') and
                isinstance(klass.help_options, list)):
                parser.set_option_table(klass.user_options +
                                        fix_help_options(klass.help_options))
            else:
                parser.set_option_table(klass.user_options)
            parser.print_help("Options for '%s' command:" % klass.__name__)
            print('')

        print(gen_usage(self.script_name))
+
+    def handle_display_options(self, option_order):
+        """If there were any non-global "display-only" options
+        (--help-commands or the metadata display options) on the command
+        line, display the requested info and return true; else return
+        false.
+        """
+        from distutils.core import gen_usage
+
+        # User just wants a list of commands -- we'll print it out and stop
+        # processing now (ie. if they ran "setup --help-commands foo bar",
+        # we ignore "foo bar").
+        if self.help_commands:
+            self.print_commands()
+            print('')
+            print(gen_usage(self.script_name))
+            return 1
+
+        # If user supplied any of the "display metadata" options, then
+        # display that metadata in the order in which the user supplied the
+        # metadata options.
+        any_display_options = 0
+        is_display_option = {}
+        for option in self.display_options:
+            is_display_option[option[0]] = 1
+
+        for (opt, val) in option_order:
+            if val and is_display_option.get(opt):
+                opt = translate_longopt(opt)
+                value = getattr(self.metadata, "get_"+opt)()
+                if opt in ['keywords', 'platforms']:
+                    print(','.join(value))
+                elif opt in ('classifiers', 'provides', 'requires',
+                             'obsoletes'):
+                    print('\n'.join(value))
+                else:
+                    print(value)
+                any_display_options = 1
+
+        return any_display_options
+
+    def print_command_list(self, commands, header, max_length):
+        """Print a subset of the list of all commands -- used by
+        'print_commands()'.
+        """
+        print(header + ":")
+
+        for cmd in commands:
+            klass = self.cmdclass.get(cmd)
+            if not klass:
+                klass = self.get_command_class(cmd)
+            try:
+                description = klass.description
+            except AttributeError:
+                description = "(no description available)"
+
+            print("  %-*s  %s" % (max_length, cmd, description))
+
+    def print_commands(self):
+        """Print out a help message listing all available commands with a
+        description of each.  The list is divided into "standard commands"
+        (listed in distutils.command.__all__) and "extra commands"
+        (mentioned in self.cmdclass, but not a standard command).  The
+        descriptions come from the command class attribute
+        'description'.
+        """
+        import distutils.command
+        std_commands = distutils.command.__all__
+        is_std = {}
+        for cmd in std_commands:
+            is_std[cmd] = 1
+
+        extra_commands = []
+        for cmd in self.cmdclass.keys():
+            if not is_std.get(cmd):
+                extra_commands.append(cmd)
+
+        max_length = 0
+        for cmd in (std_commands + extra_commands):
+            if len(cmd) > max_length:
+                max_length = len(cmd)
+
+        self.print_command_list(std_commands,
+                                "Standard commands",
+                                max_length)
+        if extra_commands:
+            print
+            self.print_command_list(extra_commands,
+                                    "Extra commands",
+                                    max_length)
+
+    def get_command_list(self):
+        """Get a list of (command, description) tuples.
+        The list is divided into "standard commands" (listed in
+        distutils.command.__all__) and "extra commands" (mentioned in
+        self.cmdclass, but not a standard command).  The descriptions come
+        from the command class attribute 'description'.
+        """
+        # Currently this is only used on Mac OS, for the Mac-only GUI
+        # Distutils interface (by Jack Jansen)
+
+        import distutils.command
+        std_commands = distutils.command.__all__
+        is_std = {}
+        for cmd in std_commands:
+            is_std[cmd] = 1
+
+        extra_commands = []
+        for cmd in self.cmdclass.keys():
+            if not is_std.get(cmd):
+                extra_commands.append(cmd)
+
+        rv = []
+        for cmd in (std_commands + extra_commands):
+            klass = self.cmdclass.get(cmd)
+            if not klass:
+                klass = self.get_command_class(cmd)
+            try:
+                description = klass.description
+            except AttributeError:
+                description = "(no description available)"
+            rv.append((cmd, description))
+        return rv
+
+    # -- Command class/object methods ----------------------------------
+
+    def get_command_packages(self):
+        """Return a list of packages from which commands are loaded."""
+        pkgs = self.command_packages
+        if not isinstance(pkgs, list):
+            if pkgs is None:
+                pkgs = ''
+            pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != '']
+            if "distutils.command" not in pkgs:
+                pkgs.insert(0, "distutils.command")
+            self.command_packages = pkgs
+        return pkgs
+
    def get_command_class(self, command):
        """Return the class that implements the Distutils command named by
        'command'.  First we check the 'cmdclass' dictionary; if the
        command is mentioned there, we fetch the class object from the
        dictionary and return it.  Otherwise we load the command module
        ("distutils.command." + command) and fetch the command class from
        the module.  The loaded class is also stored in 'cmdclass'
        to speed future calls to 'get_command_class()'.

        Raises DistutilsModuleError if the expected module could not be
        found, or if that module does not define the expected class.
        """
        klass = self.cmdclass.get(command)
        if klass:
            return klass

        # Search each configured command package in order; first hit wins.
        for pkgname in self.get_command_packages():
            module_name = "%s.%s" % (pkgname, command)
            # By convention the class is named exactly like the command.
            klass_name = command

            try:
                __import__ (module_name)
                module = sys.modules[module_name]
            except ImportError:
                # Not provided by this package -- try the next one.
                continue

            try:
                klass = getattr(module, klass_name)
            except AttributeError:
                raise DistutilsModuleError, \
                      "invalid command '%s' (no class '%s' in module '%s')" \
                      % (command, klass_name, module_name)

            # Cache the class so later lookups skip the import machinery.
            self.cmdclass[command] = klass
            return klass

        raise DistutilsModuleError("invalid command '%s'" % command)
+
+
+    def get_command_obj(self, command, create=1):
+        """Return the command object for 'command'.  Normally this object
+        is cached on a previous call to 'get_command_obj()'; if no command
+        object for 'command' is in the cache, then we either create and
+        return it (if 'create' is true) or return None.
+        """
+        cmd_obj = self.command_obj.get(command)
+        if not cmd_obj and create:
+            if DEBUG:
+                self.announce("Distribution.get_command_obj(): " \
+                              "creating '%s' command object" % command)
+
+            klass = self.get_command_class(command)
+            cmd_obj = self.command_obj[command] = klass(self)
+            self.have_run[command] = 0
+
+            # Set any options that were supplied in config files
+            # or on the command line.  (NB. support for error
+            # reporting is lame here: any errors aren't reported
+            # until 'finalize_options()' is called, which means
+            # we won't report the source of the error.)
+            options = self.command_options.get(command)
+            if options:
+                self._set_command_options(cmd_obj, options)
+
+        return cmd_obj
+
    def _set_command_options(self, command_obj, option_dict=None):
        """Set the options for 'command_obj' from 'option_dict'.  Basically
        this means copying elements of a dictionary ('option_dict') to
        attributes of an instance ('command').

        'command_obj' must be a Command instance.  If 'option_dict' is not
        supplied, uses the standard option dictionary for this command
        (from 'self.command_options').

        Raises DistutilsOptionError for an unknown option or a string
        value that fails boolean conversion.
        """
        command_name = command_obj.get_command_name()
        if option_dict is None:
            option_dict = self.get_option_dict(command_name)

        if DEBUG:
            self.announce("  setting options for '%s' command:" % command_name)
        # option_dict maps option name -> (source, value); 'source' names
        # the config file (or "command line") for error messages.
        for (option, (source, value)) in option_dict.items():
            if DEBUG:
                self.announce("    %s = %s (from %s)" % (option, value,
                                                         source))
            # Translate the command's boolean option names into attribute
            # form so they can be matched against 'option' below.
            try:
                bool_opts = map(translate_longopt, command_obj.boolean_options)
            except AttributeError:
                bool_opts = []
            try:
                neg_opt = command_obj.negative_opt
            except AttributeError:
                neg_opt = {}

            try:
                # Only *string* values (i.e. from config files) go through
                # strtobool(); already-parsed values are stored verbatim.
                is_string = isinstance(value, str)
                if option in neg_opt and is_string:
                    setattr(command_obj, neg_opt[option], not strtobool(value))
                elif option in bool_opts and is_string:
                    setattr(command_obj, option, strtobool(value))
                elif hasattr(command_obj, option):
                    setattr(command_obj, option, value)
                else:
                    raise DistutilsOptionError, \
                          ("error in %s: command '%s' has no such option '%s'"
                           % (source, command_name, option))
            except ValueError, msg:
                raise DistutilsOptionError, msg
+
+    def reinitialize_command(self, command, reinit_subcommands=0):
+        """Reinitializes a command to the state it was in when first
+        returned by 'get_command_obj()': ie., initialized but not yet
+        finalized.  This provides the opportunity to sneak option
+        values in programmatically, overriding or supplementing
+        user-supplied values from the config files and command line.
+        You'll have to re-finalize the command object (by calling
+        'finalize_options()' or 'ensure_finalized()') before using it for
+        real.
+
+        'command' should be a command name (string) or command object.  If
+        'reinit_subcommands' is true, also reinitializes the command's
+        sub-commands, as declared by the 'sub_commands' class attribute (if
+        it has one).  See the "install" command for an example.  Only
+        reinitializes the sub-commands that actually matter, ie. those
+        whose test predicates return true.
+
+        Returns the reinitialized command object.
+        """
+        from distutils.cmd import Command
+        if not isinstance(command, Command):
+            command_name = command
+            command = self.get_command_obj(command_name)
+        else:
+            command_name = command.get_command_name()
+
+        if not command.finalized:
+            return command
+        command.initialize_options()
+        command.finalized = 0
+        self.have_run[command_name] = 0
+        self._set_command_options(command)
+
+        if reinit_subcommands:
+            for sub in command.get_sub_commands():
+                self.reinitialize_command(sub, reinit_subcommands)
+
+        return command
+
+    # -- Methods that operate on the Distribution ----------------------
+
    def announce(self, msg, level=log.INFO):
        """Emit 'msg' through the distutils log at severity 'level'."""
        log.log(level, msg)
+
+    def run_commands(self):
+        """Run each command that was seen on the setup script command line.
+        Uses the list of commands found and cache of command objects
+        created by 'get_command_obj()'.
+        """
+        for cmd in self.commands:
+            self.run_command(cmd)
+
+    # -- Methods that operate on its Commands --------------------------
+
    def run_command(self, command):
        """Do whatever it takes to run a command (including nothing at all,
        if the command has already been run).  Specifically: if we have
        already created and run the command named by 'command', return
        silently without doing anything.  If the command named by 'command'
        doesn't even have a command object yet, create one.  Then invoke
        'run()' on that command object (or an existing one).
        """
        # Already been here, done that? then return silently.
        if self.have_run.get(command):
            return

        log.info("running %s", command)
        cmd_obj = self.get_command_obj(command)
        cmd_obj.ensure_finalized()
        cmd_obj.run()
        # Marked as run only after run() returns, so a command that
        # raised can be attempted again.
        self.have_run[command] = 1
+
+
+    # -- Distribution query methods ------------------------------------
+
+    def has_pure_modules(self):
+        return len(self.packages or self.py_modules or []) > 0
+
+    def has_ext_modules(self):
+        return self.ext_modules and len(self.ext_modules) > 0
+
+    def has_c_libraries(self):
+        return self.libraries and len(self.libraries) > 0
+
+    def has_modules(self):
+        return self.has_pure_modules() or self.has_ext_modules()
+
+    def has_headers(self):
+        return self.headers and len(self.headers) > 0
+
+    def has_scripts(self):
+        return self.scripts and len(self.scripts) > 0
+
+    def has_data_files(self):
+        return self.data_files and len(self.data_files) > 0
+
+    def is_pure(self):
+        return (self.has_pure_modules() and
+                not self.has_ext_modules() and
+                not self.has_c_libraries())
+
+    # -- Metadata query methods ----------------------------------------
+
+    # If you're looking for 'get_name()', 'get_version()', and so forth,
+    # they are defined in a sneaky way: the constructor binds self.get_XXX
+    # to self.metadata.get_XXX.  The actual code is in the
+    # DistributionMetadata class, below.
+
class DistributionMetadata:
    """Container for the distribution meta-data: name, version, author,
    and so forth, as written to and read from PKG-INFO files.
    """

    # Basenames for which the Distribution constructor binds self.get_XXX
    # (and, where defined, set_XXX) to the corresponding metadata method.
    # Fix: "license" used to appear twice in this tuple; the duplicate
    # was redundant (it only re-bound the same methods) and is removed.
    _METHOD_BASENAMES = ("name", "version", "author", "author_email",
                         "maintainer", "maintainer_email", "url",
                         "license", "description", "long_description",
                         "keywords", "platforms", "fullname", "contact",
                         "contact_email", "classifiers",
                         "download_url",
                         # PEP 314
                         "provides", "requires", "obsoletes",
                         )

    def __init__(self, path=None):
        """Initialize every field to None, or populate the fields from
        the PKG-INFO file at 'path' when one is given.
        """
        if path is not None:
            self.read_pkg_file(open(path))
        else:
            self.name = None
            self.version = None
            self.author = None
            self.author_email = None
            self.maintainer = None
            self.maintainer_email = None
            self.url = None
            self.license = None
            self.description = None
            self.long_description = None
            self.keywords = None
            self.platforms = None
            self.classifiers = None
            self.download_url = None
            # PEP 314
            self.provides = None
            self.requires = None
            self.obsoletes = None

    def read_pkg_file(self, file):
        """Reads the metadata values from a file object."""
        msg = message_from_file(file)

        def _read_field(name):
            # Single-valued header; PKG-INFO writes 'UNKNOWN' for unset
            # fields, which we map back to None.
            value = msg[name]
            if value == 'UNKNOWN':
                return None
            return value

        def _read_list(name):
            # Multi-valued header (e.g. Classifier); None when absent.
            values = msg.get_all(name, None)
            if values == []:
                return None
            return values

        metadata_version = msg['metadata-version']
        self.name = _read_field('name')
        self.version = _read_field('version')
        self.description = _read_field('summary')
        # we are filling author only.
        self.author = _read_field('author')
        self.maintainer = None
        self.author_email = _read_field('author-email')
        self.maintainer_email = None
        self.url = _read_field('home-page')
        self.license = _read_field('license')

        if 'download-url' in msg:
            self.download_url = _read_field('download-url')
        else:
            self.download_url = None

        self.long_description = _read_field('description')
        # Fix: a second, identical "self.description = _read_field('summary')"
        # assignment used to follow here; it was redundant and is removed.

        if 'keywords' in msg:
            self.keywords = _read_field('keywords').split(',')

        self.platforms = _read_list('platform')
        self.classifiers = _read_list('classifier')

        # PEP 314 - these fields only exist in 1.1
        if metadata_version == '1.1':
            self.requires = _read_list('requires')
            self.provides = _read_list('provides')
            self.obsoletes = _read_list('obsoletes')
        else:
            self.requires = None
            self.provides = None
            self.obsoletes = None

    def write_pkg_info(self, base_dir):
        """Write the PKG-INFO file into the release tree.
        """
        pkg_info = open(os.path.join(base_dir, 'PKG-INFO'), 'w')
        try:
            self.write_pkg_file(pkg_info)
        finally:
            pkg_info.close()

    def write_pkg_file(self, file):
        """Write the PKG-INFO format data to a file object.
        """
        # Metadata-Version 1.1 is only needed for the PEP 314 fields.
        version = '1.0'
        if self.provides or self.requires or self.obsoletes:
            version = '1.1'

        self._write_field(file, 'Metadata-Version', version)
        self._write_field(file, 'Name', self.get_name())
        self._write_field(file, 'Version', self.get_version())
        self._write_field(file, 'Summary', self.get_description())
        self._write_field(file, 'Home-page', self.get_url())
        self._write_field(file, 'Author', self.get_contact())
        self._write_field(file, 'Author-email', self.get_contact_email())
        self._write_field(file, 'License', self.get_license())
        if self.download_url:
            self._write_field(file, 'Download-URL', self.download_url)

        # Escape newlines so the multi-line description survives the
        # RFC-822-style header format.
        long_desc = rfc822_escape(self.get_long_description())
        self._write_field(file, 'Description', long_desc)

        keywords = ','.join(self.get_keywords())
        if keywords:
            self._write_field(file, 'Keywords', keywords)

        self._write_list(file, 'Platform', self.get_platforms())
        self._write_list(file, 'Classifier', self.get_classifiers())

        # PEP 314
        self._write_list(file, 'Requires', self.get_requires())
        self._write_list(file, 'Provides', self.get_provides())
        self._write_list(file, 'Obsoletes', self.get_obsoletes())

    def _write_field(self, file, name, value):
        """Write one 'Name: value' header line."""
        file.write('%s: %s\n' % (name, self._encode_field(value)))

    def _write_list (self, file, name, values):
        """Write one header line per element of 'values'."""
        for value in values:
            self._write_field(file, name, value)

    def _encode_field(self, value):
        """Return 'value' as an encoded byte string (Python 2 'unicode'
        is encoded with PKG_INFO_ENCODING), or None for None.
        """
        if value is None:
            return None
        if isinstance(value, unicode):
            return value.encode(PKG_INFO_ENCODING)
        return str(value)

    # -- Metadata query methods ----------------------------------------
    # Unset single-valued fields read back as "UNKNOWN"; unset list
    # fields read back as [] (or ["UNKNOWN"] for platforms).

    def get_name(self):
        return self.name or "UNKNOWN"

    def get_version(self):
        return self.version or "0.0.0"

    def get_fullname(self):
        return "%s-%s" % (self.get_name(), self.get_version())

    def get_author(self):
        return self._encode_field(self.author) or "UNKNOWN"

    def get_author_email(self):
        return self.author_email or "UNKNOWN"

    def get_maintainer(self):
        return self._encode_field(self.maintainer) or "UNKNOWN"

    def get_maintainer_email(self):
        return self.maintainer_email or "UNKNOWN"

    def get_contact(self):
        # Maintainer wins over author when both are set.
        return (self._encode_field(self.maintainer) or
                self._encode_field(self.author) or "UNKNOWN")

    def get_contact_email(self):
        return self.maintainer_email or self.author_email or "UNKNOWN"

    def get_url(self):
        return self.url or "UNKNOWN"

    def get_license(self):
        return self.license or "UNKNOWN"
    # Alias for the alternate spelling.
    get_licence = get_license

    def get_description(self):
        return self._encode_field(self.description) or "UNKNOWN"

    def get_long_description(self):
        return self._encode_field(self.long_description) or "UNKNOWN"

    def get_keywords(self):
        return self.keywords or []

    def get_platforms(self):
        return self.platforms or ["UNKNOWN"]

    def get_classifiers(self):
        return self.classifiers or []

    def get_download_url(self):
        return self.download_url or "UNKNOWN"

    # PEP 314
    def get_requires(self):
        return self.requires or []

    def set_requires(self, value):
        """Set 'requires', validating each entry as a version predicate."""
        import distutils.versionpredicate
        for v in value:
            distutils.versionpredicate.VersionPredicate(v)
        self.requires = value

    def get_provides(self):
        return self.provides or []

    def set_provides(self, value):
        """Set 'provides', validating each (stripped) entry."""
        value = [v.strip() for v in value]
        for v in value:
            import distutils.versionpredicate
            distutils.versionpredicate.split_provision(v)
        self.provides = value

    def get_obsoletes(self):
        return self.obsoletes or []

    def set_obsoletes(self, value):
        """Set 'obsoletes', validating each entry as a version predicate."""
        import distutils.versionpredicate
        for v in value:
            distutils.versionpredicate.VersionPredicate(v)
        self.obsoletes = value
+
def fix_help_options(options):
    """Trim the 4-tuple 'help_options' entries found in command classes
    down to the 3-tuples that FancyGetopt requires.
    """
    return [entry[0:3] for entry in options]
diff --git a/lib/distutils/distutils/emxccompiler.py b/lib/distutils/distutils/emxccompiler.py
new file mode 100644
index 0000000..a017205
--- /dev/null
+++ b/lib/distutils/distutils/emxccompiler.py
@@ -0,0 +1,319 @@
+"""distutils.emxccompiler
+
+Provides the EMXCCompiler class, a subclass of UnixCCompiler that
+handles the EMX port of the GNU C compiler to OS/2.
+"""
+
+# issues:
+#
+# * OS/2 insists that DLLs can have names no longer than 8 characters
+#   We put export_symbols in a def-file, as though the DLL can have
+#   an arbitrary length name, but truncate the output filename.
+#
+# * only use OMF objects and use LINK386 as the linker (-Zomf)
+#
+# * always build for multithreading (-Zmt) as the accompanying OS/2 port
+#   of Python is only distributed with threads enabled.
+#
+# tested configurations:
+#
+# * EMX gcc 2.81/EMX 0.9d fix03
+
+__revision__ = "$Id$"
+
+import os,sys,copy
+from distutils.ccompiler import gen_preprocess_options, gen_lib_options
+from distutils.unixccompiler import UnixCCompiler
+from distutils.file_util import write_file
+from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
+from distutils import log
+
+class EMXCCompiler (UnixCCompiler):
+
+    compiler_type = 'emx'
+    obj_extension = ".obj"
+    static_lib_extension = ".lib"
+    shared_lib_extension = ".dll"
+    static_lib_format = "%s%s"
+    shared_lib_format = "%s%s"
+    res_extension = ".res"      # compiled resource file
+    exe_extension = ".exe"
+
+    def __init__ (self,
+                  verbose=0,
+                  dry_run=0,
+                  force=0):
+
+        UnixCCompiler.__init__ (self, verbose, dry_run, force)
+
+        (status, details) = check_config_h()
+        self.debug_print("Python's GCC status: %s (details: %s)" %
+                         (status, details))
+        if status is not CONFIG_H_OK:
+            self.warn(
+                "Python's pyconfig.h doesn't seem to support your compiler.  " +
+                ("Reason: %s." % details) +
+                "Compiling may fail because of undefined preprocessor macros.")
+
+        (self.gcc_version, self.ld_version) = \
+            get_versions()
+        self.debug_print(self.compiler_type + ": gcc %s, ld %s\n" %
+                         (self.gcc_version,
+                          self.ld_version) )
+
+        # Hard-code GCC because that's what this is all about.
+        # XXX optimization, warnings etc. should be customizable.
+        self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
+                             compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
+                             linker_exe='gcc -Zomf -Zmt -Zcrtdll',
+                             linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll')
+
+        # want the gcc library statically linked (so that we don't have
+        # to distribute a version dependent on the compiler we have)
+        self.dll_libraries=["gcc"]
+
+    # __init__ ()
+
+    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+        if ext == '.rc':
+            # gcc requires '.rc' compiled to binary ('.res') files !!!
+            try:
+                self.spawn(["rc", "-r", src])
+            except DistutilsExecError, msg:
+                raise CompileError, msg
+        else: # for other files use the C-compiler
+            try:
+                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
+                           extra_postargs)
+            except DistutilsExecError, msg:
+                raise CompileError, msg
+
+    def link (self,
+              target_desc,
+              objects,
+              output_filename,
+              output_dir=None,
+              libraries=None,
+              library_dirs=None,
+              runtime_library_dirs=None,
+              export_symbols=None,
+              debug=0,
+              extra_preargs=None,
+              extra_postargs=None,
+              build_temp=None,
+              target_lang=None):
+
+        # use separate copies, so we can modify the lists
+        extra_preargs = copy.copy(extra_preargs or [])
+        libraries = copy.copy(libraries or [])
+        objects = copy.copy(objects or [])
+
+        # Additional libraries
+        libraries.extend(self.dll_libraries)
+
+        # handle export symbols by creating a def-file
+        # with executables this only works with gcc/ld as linker
+        if ((export_symbols is not None) and
+            (target_desc != self.EXECUTABLE)):
+            # (The linker doesn't do anything if output is up-to-date.
+            # So it would probably better to check if we really need this,
+            # but for this we had to insert some unchanged parts of
+            # UnixCCompiler, and this is not what we want.)
+
+            # we want to put some files in the same directory as the
+            # object files are, build_temp doesn't help much
+            # where are the object files
+            temp_dir = os.path.dirname(objects[0])
+            # name of dll to give the helper files the same base name
+            (dll_name, dll_extension) = os.path.splitext(
+                os.path.basename(output_filename))
+
+            # generate the filenames for these files
+            def_file = os.path.join(temp_dir, dll_name + ".def")
+
+            # Generate .def file
+            contents = [
+                "LIBRARY %s INITINSTANCE TERMINSTANCE" % \
+                os.path.splitext(os.path.basename(output_filename))[0],
+                "DATA MULTIPLE NONSHARED",
+                "EXPORTS"]
+            for sym in export_symbols:
+                contents.append('  "%s"' % sym)
+            self.execute(write_file, (def_file, contents),
+                         "writing %s" % def_file)
+
+            # next add options for def-file and to creating import libraries
+            # for gcc/ld the def-file is specified as any other object files
+            objects.append(def_file)
+
+        #end: if ((export_symbols is not None) and
+        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
+
+        # who wants symbols and a many times larger output file
+        # should explicitly switch the debug mode on
+        # otherwise we let dllwrap/ld strip the output file
+        # (On my machine: 10KB < stripped_file < ??100KB
+        #   unstripped_file = stripped_file + XXX KB
+        #  ( XXX=254 for a typical python extension))
+        if not debug:
+            extra_preargs.append("-s")
+
+        UnixCCompiler.link(self,
+                           target_desc,
+                           objects,
+                           output_filename,
+                           output_dir,
+                           libraries,
+                           library_dirs,
+                           runtime_library_dirs,
+                           None, # export_symbols, we do this in our def-file
+                           debug,
+                           extra_preargs,
+                           extra_postargs,
+                           build_temp,
+                           target_lang)
+
+    # link ()
+
+    # -- Miscellaneous methods -----------------------------------------
+
+    # override the object_filenames method from CCompiler to
+    # support rc and res-files
+    def object_filenames (self,
+                          source_filenames,
+                          strip_dir=0,
+                          output_dir=''):
+        if output_dir is None: output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
+            (base, ext) = os.path.splitext (os.path.normcase(src_name))
+            if ext not in (self.src_extensions + ['.rc']):
+                raise UnknownFileError, \
+                      "unknown file type '%s' (from '%s')" % \
+                      (ext, src_name)
+            if strip_dir:
+                base = os.path.basename (base)
+            if ext == '.rc':
+                # these need to be compiled to object files
+                obj_names.append (os.path.join (output_dir,
+                                            base + self.res_extension))
+            else:
+                obj_names.append (os.path.join (output_dir,
+                                            base + self.obj_extension))
+        return obj_names
+
+    # object_filenames ()
+
+    # override the find_library_file method from UnixCCompiler
+    # to deal with file naming/searching differences
+    def find_library_file(self, dirs, lib, debug=0):
+        shortlib = '%s.lib' % lib
+        longlib = 'lib%s.lib' % lib    # this form very rare
+
+        # get EMX's default library directory search path
+        try:
+            emx_dirs = os.environ['LIBRARY_PATH'].split(';')
+        except KeyError:
+            emx_dirs = []
+
+        for dir in dirs + emx_dirs:
+            shortlibp = os.path.join(dir, shortlib)
+            longlibp = os.path.join(dir, longlib)
+            if os.path.exists(shortlibp):
+                return shortlibp
+            elif os.path.exists(longlibp):
+                return longlibp
+
+        # Oops, didn't find it in *any* of 'dirs'
+        return None
+
+# class EMXCCompiler
+
+
+# Because these compilers aren't configured in Python's pyconfig.h file by
+# default, we should at least warn the user if they are using an unmodified
+# version.
+
+CONFIG_H_OK = "ok"
+CONFIG_H_NOTOK = "not ok"
+CONFIG_H_UNCERTAIN = "uncertain"
+
+def check_config_h():
+
+    """Check if the current Python installation (specifically, pyconfig.h)
+    appears amenable to building extensions with GCC.  Returns a tuple
+    (status, details), where 'status' is one of the following constants:
+      CONFIG_H_OK
+        all is well, go ahead and compile
+      CONFIG_H_NOTOK
+        doesn't look good
+      CONFIG_H_UNCERTAIN
+        not sure -- unable to read pyconfig.h
+    'details' is a human-readable string explaining the situation.
+
+    Note there are two ways to conclude "OK": either 'sys.version' contains
+    the string "GCC" (implying that this Python was built with GCC), or the
+    installed "pyconfig.h" contains the string "__GNUC__".
+    """
+
+    # XXX since this function also checks sys.version, it's not strictly a
+    # "pyconfig.h" check -- should probably be renamed...
+
+    from distutils import sysconfig
+    import string
+    # if sys.version contains GCC then python was compiled with
+    # GCC, and the pyconfig.h file should be OK
+    if string.find(sys.version,"GCC") >= 0:
+        return (CONFIG_H_OK, "sys.version mentions 'GCC'")
+
+    fn = sysconfig.get_config_h_filename()
+    try:
+        # It would probably better to read single lines to search.
+        # But we do this only once, and it is fast enough
+        f = open(fn)
+        try:
+            s = f.read()
+        finally:
+            f.close()
+
+    except IOError, exc:
+        # if we can't read this file, we cannot say it is wrong
+        # the compiler will complain later about this file as missing
+        return (CONFIG_H_UNCERTAIN,
+                "couldn't read '%s': %s" % (fn, exc.strerror))
+
+    else:
+        # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
+        if string.find(s,"__GNUC__") >= 0:
+            return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
+        else:
+            return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)
+
+
+def get_versions():
+    """ Try to find out the versions of gcc and ld.
+        If not possible it returns None for it.
+    """
+    from distutils.version import StrictVersion
+    from distutils.spawn import find_executable
+    import re
+
+    gcc_exe = find_executable('gcc')
+    if gcc_exe:
+        out = os.popen(gcc_exe + ' -dumpversion','r')
+        try:
+            out_string = out.read()
+        finally:
+            out.close()
+        result = re.search('(\d+\.\d+\.\d+)',out_string)
+        if result:
+            gcc_version = StrictVersion(result.group(1))
+        else:
+            gcc_version = None
+    else:
+        gcc_version = None
+    # EMX ld has no way of reporting version number, and we use GCC
+    # anyway - so we can link OMF DLLs
+    ld_version = None
+    return (gcc_version, ld_version)
diff --git a/lib/distutils/distutils/errors.py b/lib/distutils/distutils/errors.py
new file mode 100644
index 0000000..d9c47c7
--- /dev/null
+++ b/lib/distutils/distutils/errors.py
@@ -0,0 +1,88 @@
+"""distutils.errors
+
+Provides exceptions used by the Distutils modules.  Note that Distutils
+modules may raise standard exceptions; in particular, SystemExit is
+usually raised for errors that are obviously the end-user's fault
+(eg. bad command-line arguments).
+
+This module is safe to use in "from ... import *" mode; it only exports
+symbols whose names start with "Distutils" and end with "Error"."""
+
+__revision__ = "$Id$"
+
+class DistutilsError(Exception):
+    """The root of all Distutils evil."""
+
+class DistutilsModuleError(DistutilsError):
+    """Unable to load an expected module, or to find an expected class
+    within some module (in particular, command modules and classes)."""
+
+class DistutilsClassError(DistutilsError):
+    """Some command class (or possibly distribution class, if anyone
+    feels a need to subclass Distribution) is found not to be holding
+    up its end of the bargain, ie. implementing some part of the
+    "command" interface."""
+
+class DistutilsGetoptError(DistutilsError):
+    """The option table provided to 'fancy_getopt()' is bogus."""
+
+class DistutilsArgError(DistutilsError):
+    """Raised by fancy_getopt in response to getopt.error -- ie. an
+    error in the command line usage."""
+
+class DistutilsFileError(DistutilsError):
+    """Any problems in the filesystem: expected file not found, etc.
+    Typically this is for problems that we detect before IOError or
+    OSError could be raised."""
+
+class DistutilsOptionError(DistutilsError):
+    """Syntactic/semantic errors in command options, such as use of
+    mutually conflicting options, or inconsistent options,
+    badly-spelled values, etc.  No distinction is made between option
+    values originating in the setup script, the command line, config
+    files, or what-have-you -- but if we *know* something originated in
+    the setup script, we'll raise DistutilsSetupError instead."""
+
+class DistutilsSetupError(DistutilsError):
+    """For errors that can be definitely blamed on the setup script,
+    such as invalid keyword arguments to 'setup()'."""
+
+class DistutilsPlatformError(DistutilsError):
+    """We don't know how to do something on the current platform (but
+    we do know how to do it on some platform) -- eg. trying to compile
+    C files on a platform not supported by a CCompiler subclass."""
+
+class DistutilsExecError(DistutilsError):
+    """Any problems executing an external program (such as the C
+    compiler, when compiling C files)."""
+
+class DistutilsInternalError(DistutilsError):
+    """Internal inconsistencies or impossibilities (obviously, this
+    should never be seen if the code is working!)."""
+
+class DistutilsTemplateError(DistutilsError):
+    """Syntax error in a file list template."""
+
+class DistutilsByteCompileError(DistutilsError):
+    """Byte compile error."""
+
+# Exception classes used by the CCompiler implementation classes
+class CCompilerError(Exception):
+    """Some compile/link operation failed."""
+
+class PreprocessError(CCompilerError):
+    """Failure to preprocess one or more C/C++ files."""
+
+class CompileError(CCompilerError):
+    """Failure to compile one or more C/C++ source files."""
+
+class LibError(CCompilerError):
+    """Failure to create a static library from one or more C/C++ object
+    files."""
+
+class LinkError(CCompilerError):
+    """Failure to link one or more C/C++ object files into an executable
+    or shared library file."""
+
+class UnknownFileError(CCompilerError):
+    """Attempt to process an unknown file type."""
diff --git a/lib/distutils/distutils/extension.py b/lib/distutils/distutils/extension.py
new file mode 100644
index 0000000..9a67ca8
--- /dev/null
+++ b/lib/distutils/distutils/extension.py
@@ -0,0 +1,255 @@
+"""distutils.extension
+
+Provides the Extension class, used to describe C/C++ extension
+modules in setup scripts."""
+
+__revision__ = "$Id$"
+
+import os, string, sys
+from types import *
+
+try:
+    import warnings
+except ImportError:
+    warnings = None
+
+# This class is really only used by the "build_ext" command, so it might
+# make sense to put it in distutils.command.build_ext.  However, that
+# module is already big enough, and I want to make this class a bit more
+# complex to simplify some common cases ("foo" module in "foo.c") and do
+# better error-checking ("foo.c" actually exists).
+#
+# Also, putting this in build_ext.py means every setup script would have to
+# import that large-ish module (indirectly, through distutils.core) in
+# order to do anything.
+
+class Extension:
+    """Just a collection of attributes that describes an extension
+    module and everything needed to build it (hopefully in a portable
+    way, but there are hooks that let you be as unportable as you need).
+
+    Instance attributes:
+      name : string
+        the full name of the extension, including any packages -- ie.
+        *not* a filename or pathname, but Python dotted name
+      sources : [string]
+        list of source filenames, relative to the distribution root
+        (where the setup script lives), in Unix form (slash-separated)
+        for portability.  Source files may be C, C++, SWIG (.i),
+        platform-specific resource files, or whatever else is recognized
+        by the "build_ext" command as source for a Python extension.
+      include_dirs : [string]
+        list of directories to search for C/C++ header files (in Unix
+        form for portability)
+      define_macros : [(name : string, value : string|None)]
+        list of macros to define; each macro is defined using a 2-tuple,
+        where 'value' is either the string to define it to or None to
+        define it without a particular value (equivalent of "#define
+        FOO" in source or -DFOO on Unix C compiler command line)
+      undef_macros : [string]
+        list of macros to undefine explicitly
+      library_dirs : [string]
+        list of directories to search for C/C++ libraries at link time
+      libraries : [string]
+        list of library names (not filenames or paths) to link against
+      runtime_library_dirs : [string]
+        list of directories to search for C/C++ libraries at run time
+        (for shared extensions, this is when the extension is loaded)
+      extra_objects : [string]
+        list of extra files to link with (eg. object files not implied
+        by 'sources', static library that must be explicitly specified,
+        binary resource files, etc.)
+      extra_compile_args : [string]
+        any extra platform- and compiler-specific information to use
+        when compiling the source files in 'sources'.  For platforms and
+        compilers where "command line" makes sense, this is typically a
+        list of command-line arguments, but for other platforms it could
+        be anything.
+      extra_link_args : [string]
+        any extra platform- and compiler-specific information to use
+        when linking object files together to create the extension (or
+        to create a new static Python interpreter).  Similar
+        interpretation as for 'extra_compile_args'.
+      export_symbols : [string]
+        list of symbols to be exported from a shared extension.  Not
+        used on all platforms, and not generally necessary for Python
+        extensions, which typically export exactly one symbol: "init" +
+        extension_name.
+      swig_opts : [string]
+        any extra options to pass to SWIG if a source file has the .i
+        extension.
+      depends : [string]
+        list of files that the extension depends on
+      language : string
+        extension language (i.e. "c", "c++", "objc"). Will be detected
+        from the source extensions if not provided.
+    """
+
+    # When adding arguments to this constructor, be sure to update
+    # setup_keywords in core.py.
+    def __init__ (self, name, sources,
+                  include_dirs=None,
+                  define_macros=None,
+                  undef_macros=None,
+                  library_dirs=None,
+                  libraries=None,
+                  runtime_library_dirs=None,
+                  extra_objects=None,
+                  extra_compile_args=None,
+                  extra_link_args=None,
+                  export_symbols=None,
+                  swig_opts = None,
+                  depends=None,
+                  language=None,
+                  **kw                      # To catch unknown keywords
+                 ):
+        assert type(name) is StringType, "'name' must be a string"
+        assert (type(sources) is ListType and
+                map(type, sources) == [StringType]*len(sources)), \
+                "'sources' must be a list of strings"
+
+        self.name = name
+        self.sources = sources
+        self.include_dirs = include_dirs or []
+        self.define_macros = define_macros or []
+        self.undef_macros = undef_macros or []
+        self.library_dirs = library_dirs or []
+        self.libraries = libraries or []
+        self.runtime_library_dirs = runtime_library_dirs or []
+        self.extra_objects = extra_objects or []
+        self.extra_compile_args = extra_compile_args or []
+        self.extra_link_args = extra_link_args or []
+        self.export_symbols = export_symbols or []
+        self.swig_opts = swig_opts or []
+        self.depends = depends or []
+        self.language = language
+
+        # If there are unknown keyword options, warn about them
+        if len(kw):
+            L = kw.keys() ; L.sort()
+            L = map(repr, L)
+            msg = "Unknown Extension options: " + string.join(L, ', ')
+            if warnings is not None:
+                warnings.warn(msg)
+            else:
+                sys.stderr.write(msg + '\n')
+# class Extension
+
+
+def read_setup_file (filename):
+    from distutils.sysconfig import \
+         parse_makefile, expand_makefile_vars, _variable_rx
+    from distutils.text_file import TextFile
+    from distutils.util import split_quoted
+
+    # First pass over the file to gather "VAR = VALUE" assignments.
+    vars = parse_makefile(filename)
+
+    # Second pass to gobble up the real content: lines of the form
+    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
+    file = TextFile(filename,
+                    strip_comments=1, skip_blanks=1, join_lines=1,
+                    lstrip_ws=1, rstrip_ws=1)
+    try:
+        extensions = []
+
+        while 1:
+            line = file.readline()
+            if line is None:                # eof
+                break
+            if _variable_rx.match(line):    # VAR=VALUE, handled in first pass
+                continue
+
+            if line[0] == line[-1] == "*":
+                file.warn("'%s' lines not handled yet" % line)
+                continue
+
+            #print "original line: " + line
+            line = expand_makefile_vars(line, vars)
+            words = split_quoted(line)
+            #print "expanded line: " + line
+
+            # NB. this parses a slightly different syntax than the old
+            # makesetup script: here, there must be exactly one extension per
+            # line, and it must be the first word of the line.  I have no idea
+            # why the old syntax supported multiple extensions per line, as
+            # they all wind up being the same.
+
+            module = words[0]
+            ext = Extension(module, [])
+            append_next_word = None
+
+            for word in words[1:]:
+                if append_next_word is not None:
+                    append_next_word.append(word)
+                    append_next_word = None
+                    continue
+
+                suffix = os.path.splitext(word)[1]
+                switch = word[0:2] ; value = word[2:]
+
+                if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"):
+                    # hmm, should we do something about C vs. C++ sources?
+                    # or leave it up to the CCompiler implementation to
+                    # worry about?
+                    ext.sources.append(word)
+                elif switch == "-I":
+                    ext.include_dirs.append(value)
+                elif switch == "-D":
+                    equals = string.find(value, "=")
+                    if equals == -1:        # bare "-DFOO" -- no value
+                        ext.define_macros.append((value, None))
+                    else:                   # "-DFOO=blah"
+                        ext.define_macros.append((value[0:equals],
+                                                  value[equals+1:]))
+                elif switch == "-U":
+                    ext.undef_macros.append(value)
+                elif switch == "-C":        # only here 'cause makesetup has it!
+                    ext.extra_compile_args.append(word)
+                elif switch == "-l":
+                    ext.libraries.append(value)
+                elif switch == "-L":
+                    ext.library_dirs.append(value)
+                elif switch == "-R":
+                    ext.runtime_library_dirs.append(value)
+                elif word == "-rpath":
+                    append_next_word = ext.runtime_library_dirs
+                elif word == "-Xlinker":
+                    append_next_word = ext.extra_link_args
+                elif word == "-Xcompiler":
+                    append_next_word = ext.extra_compile_args
+                elif switch == "-u":
+                    ext.extra_link_args.append(word)
+                    if not value:
+                        append_next_word = ext.extra_link_args
+                # NOTE(review): the two branches below were exact duplicates
+                # of the "-Xcompiler" and "-u" branches a few lines above and
+                # could never be reached (an elif chain is evaluated top-down,
+                # so the first occurrence always wins).  Disabled as dead code.
+                # (was: elif word == "-Xcompiler": append_next_word = ...)
+                # (was: elif switch == "-u": ext.extra_link_args.append(word) ...)
+                elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
+                    # NB. a really faithful emulation of makesetup would
+                    # append a .o file to extra_objects only if it
+                    # had a slash in it; otherwise, it would s/.o/.c/
+                    # and append it to sources.  Hmmmm.
+                    ext.extra_objects.append(word)
+                else:
+                    file.warn("unrecognized argument '%s'" % word)
+
+            extensions.append(ext)
+    finally:
+        file.close()
+
+        #print "module:", module
+        #print "source files:", source_files
+        #print "cpp args:", cpp_args
+        #print "lib args:", library_args
+
+        #extensions[module] = { 'sources': source_files,
+        #                       'cpp_args': cpp_args,
+        #                       'lib_args': library_args }
+
+    return extensions
+
+# read_setup_file ()
diff --git a/lib/distutils/distutils/fancy_getopt.py b/lib/distutils/distutils/fancy_getopt.py
new file mode 100644
index 0000000..2dea948
--- /dev/null
+++ b/lib/distutils/distutils/fancy_getopt.py
@@ -0,0 +1,484 @@
+"""distutils.fancy_getopt
+
+Wrapper around the standard getopt module that provides the following
+additional features:
+  * short and long options are tied together
+  * options have help strings, so fancy_getopt could potentially
+    create a complete usage summary
+  * options set attributes of a passed-in object
+"""
+
+__revision__ = "$Id$"
+
+import sys
+import string
+import re
+import getopt
+from distutils.errors import DistutilsGetoptError, DistutilsArgError
+
# Much like command_re in distutils.core, this is close to but not quite
# the same as a Python NAME -- except, in the spirit of most GNU
# utilities, we use '-' in place of '_'.  (The spirit of LISP lives on!)
# The similarities to NAME are again not a coincidence...
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
longopt_re = re.compile(r'^%s$' % longopt_pat)

# For recognizing "negative alias" options, eg. "quiet=!verbose"
neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat))

# This is used to translate long options to legitimate Python identifiers
# (for use as attributes of some object).  Shared by translate_longopt()
# and FancyGetopt.get_attr_name() below.
longopt_xlate = string.maketrans('-', '_')
+
+class FancyGetopt:
+    """Wrapper around the standard 'getopt()' module that provides some
+    handy extra functionality:
+      * short and long options are tied together
+      * options have help strings, and help text can be assembled
+        from them
+      * options set attributes of a passed-in object
+      * boolean options can have "negative aliases" -- eg. if
+        --quiet is the "negative alias" of --verbose, then "--quiet"
+        on the command line sets 'verbose' to false
+    """
+
+    def __init__ (self, option_table=None):
+
+        # The option table is (currently) a list of tuples.  The
+        # tuples may have 3 or four values:
+        #   (long_option, short_option, help_string [, repeatable])
+        # if an option takes an argument, its long_option should have '='
+        # appended; short_option should just be a single character, no ':'
+        # in any case.  If a long_option doesn't have a corresponding
+        # short_option, short_option should be None.  All option tuples
+        # must have long options.
+        self.option_table = option_table
+
+        # 'option_index' maps long option names to entries in the option
+        # table (ie. those 3-tuples).
+        self.option_index = {}
+        if self.option_table:
+            self._build_index()
+
+        # 'alias' records (duh) alias options; {'foo': 'bar'} means
+        # --foo is an alias for --bar
+        self.alias = {}
+
+        # 'negative_alias' keeps track of options that are the boolean
+        # opposite of some other option
+        self.negative_alias = {}
+
+        # These keep track of the information in the option table.  We
+        # don't actually populate these structures until we're ready to
+        # parse the command-line, since the 'option_table' passed in here
+        # isn't necessarily the final word.
+        self.short_opts = []
+        self.long_opts = []
+        self.short2long = {}
+        self.attr_name = {}
+        self.takes_arg = {}
+
+        # And 'option_order' is filled up in 'getopt()'; it records the
+        # original order of options (and their values) on the command-line,
+        # but expands short options, converts aliases, etc.
+        self.option_order = []
+
+    # __init__ ()
+
+
+    def _build_index (self):
+        self.option_index.clear()
+        for option in self.option_table:
+            self.option_index[option[0]] = option
+
+    def set_option_table (self, option_table):
+        self.option_table = option_table
+        self._build_index()
+
+    def add_option (self, long_option, short_option=None, help_string=None):
+        if long_option in self.option_index:
+            raise DistutilsGetoptError, \
+                  "option conflict: already an option '%s'" % long_option
+        else:
+            option = (long_option, short_option, help_string)
+            self.option_table.append(option)
+            self.option_index[long_option] = option
+
+
+    def has_option (self, long_option):
+        """Return true if the option table for this parser has an
+        option with long name 'long_option'."""
+        return long_option in self.option_index
+
+    def get_attr_name (self, long_option):
+        """Translate long option name 'long_option' to the form it
+        has as an attribute of some object: ie., translate hyphens
+        to underscores."""
+        return string.translate(long_option, longopt_xlate)
+
+
+    def _check_alias_dict (self, aliases, what):
+        assert isinstance(aliases, dict)
+        for (alias, opt) in aliases.items():
+            if alias not in self.option_index:
+                raise DistutilsGetoptError, \
+                      ("invalid %s '%s': "
+                       "option '%s' not defined") % (what, alias, alias)
+            if opt not in self.option_index:
+                raise DistutilsGetoptError, \
+                      ("invalid %s '%s': "
+                       "aliased option '%s' not defined") % (what, alias, opt)
+
+    def set_aliases (self, alias):
+        """Set the aliases for this option parser."""
+        self._check_alias_dict(alias, "alias")
+        self.alias = alias
+
+    def set_negative_aliases (self, negative_alias):
+        """Set the negative aliases for this option parser.
+        'negative_alias' should be a dictionary mapping option names to
+        option names, both the key and value must already be defined
+        in the option table."""
+        self._check_alias_dict(negative_alias, "negative alias")
+        self.negative_alias = negative_alias
+
+
+    def _grok_option_table (self):
+        """Populate the various data structures that keep tabs on the
+        option table.  Called by 'getopt()' before it can do anything
+        worthwhile.
+        """
+        self.long_opts = []
+        self.short_opts = []
+        self.short2long.clear()
+        self.repeat = {}
+
+        for option in self.option_table:
+            if len(option) == 3:
+                long, short, help = option
+                repeat = 0
+            elif len(option) == 4:
+                long, short, help, repeat = option
+            else:
+                # the option table is part of the code, so simply
+                # assert that it is correct
+                raise ValueError, "invalid option tuple: %r" % (option,)
+
+            # Type- and value-check the option names
+            if not isinstance(long, str) or len(long) < 2:
+                raise DistutilsGetoptError, \
+                      ("invalid long option '%s': "
+                       "must be a string of length >= 2") % long
+
+            if (not ((short is None) or
+                     (isinstance(short, str) and len(short) == 1))):
+                raise DistutilsGetoptError, \
+                      ("invalid short option '%s': "
+                       "must a single character or None") % short
+
+            self.repeat[long] = repeat
+            self.long_opts.append(long)
+
+            if long[-1] == '=':             # option takes an argument?
+                if short: short = short + ':'
+                long = long[0:-1]
+                self.takes_arg[long] = 1
+            else:
+
+                # Is option is a "negative alias" for some other option (eg.
+                # "quiet" == "!verbose")?
+                alias_to = self.negative_alias.get(long)
+                if alias_to is not None:
+                    if self.takes_arg[alias_to]:
+                        raise DistutilsGetoptError, \
+                              ("invalid negative alias '%s': "
+                               "aliased option '%s' takes a value") % \
+                               (long, alias_to)
+
+                    self.long_opts[-1] = long # XXX redundant?!
+                    self.takes_arg[long] = 0
+
+                else:
+                    self.takes_arg[long] = 0
+
+            # If this is an alias option, make sure its "takes arg" flag is
+            # the same as the option it's aliased to.
+            alias_to = self.alias.get(long)
+            if alias_to is not None:
+                if self.takes_arg[long] != self.takes_arg[alias_to]:
+                    raise DistutilsGetoptError, \
+                          ("invalid alias '%s': inconsistent with "
+                           "aliased option '%s' (one of them takes a value, "
+                           "the other doesn't") % (long, alias_to)
+
+
+            # Now enforce some bondage on the long option name, so we can
+            # later translate it to an attribute name on some object.  Have
+            # to do this a bit late to make sure we've removed any trailing
+            # '='.
+            if not longopt_re.match(long):
+                raise DistutilsGetoptError, \
+                      ("invalid long option name '%s' " +
+                       "(must be letters, numbers, hyphens only") % long
+
+            self.attr_name[long] = self.get_attr_name(long)
+            if short:
+                self.short_opts.append(short)
+                self.short2long[short[0]] = long
+
+        # for option_table
+
+    # _grok_option_table()
+
+
+    def getopt (self, args=None, object=None):
+        """Parse command-line options in args. Store as attributes on object.
+
+        If 'args' is None or not supplied, uses 'sys.argv[1:]'.  If
+        'object' is None or not supplied, creates a new OptionDummy
+        object, stores option values there, and returns a tuple (args,
+        object).  If 'object' is supplied, it is modified in place and
+        'getopt()' just returns 'args'; in both cases, the returned
+        'args' is a modified copy of the passed-in 'args' list, which
+        is left untouched.
+        """
+        if args is None:
+            args = sys.argv[1:]
+        if object is None:
+            object = OptionDummy()
+            created_object = 1
+        else:
+            created_object = 0
+
+        self._grok_option_table()
+
+        short_opts = string.join(self.short_opts)
+        try:
+            opts, args = getopt.getopt(args, short_opts, self.long_opts)
+        except getopt.error, msg:
+            raise DistutilsArgError, msg
+
+        for opt, val in opts:
+            if len(opt) == 2 and opt[0] == '-': # it's a short option
+                opt = self.short2long[opt[1]]
+            else:
+                assert len(opt) > 2 and opt[:2] == '--'
+                opt = opt[2:]
+
+            alias = self.alias.get(opt)
+            if alias:
+                opt = alias
+
+            if not self.takes_arg[opt]:     # boolean option?
+                assert val == '', "boolean option can't have value"
+                alias = self.negative_alias.get(opt)
+                if alias:
+                    opt = alias
+                    val = 0
+                else:
+                    val = 1
+
+            attr = self.attr_name[opt]
+            # The only repeating option at the moment is 'verbose'.
+            # It has a negative option -q quiet, which should set verbose = 0.
+            if val and self.repeat.get(attr) is not None:
+                val = getattr(object, attr, 0) + 1
+            setattr(object, attr, val)
+            self.option_order.append((opt, val))
+
+        # for opts
+        if created_object:
+            return args, object
+        else:
+            return args
+
+    # getopt()
+
+
+    def get_option_order (self):
+        """Returns the list of (option, value) tuples processed by the
+        previous run of 'getopt()'.  Raises RuntimeError if
+        'getopt()' hasn't been called yet.
+        """
+        if self.option_order is None:
+            raise RuntimeError, "'getopt()' hasn't been called yet"
+        else:
+            return self.option_order
+
+
+    def generate_help (self, header=None):
+        """Generate help text (a list of strings, one per suggested line of
+        output) from the option table for this FancyGetopt object.
+        """
+        # Blithely assume the option table is good: probably wouldn't call
+        # 'generate_help()' unless you've already called 'getopt()'.
+
+        # First pass: determine maximum length of long option names
+        max_opt = 0
+        for option in self.option_table:
+            long = option[0]
+            short = option[1]
+            l = len(long)
+            if long[-1] == '=':
+                l = l - 1
+            if short is not None:
+                l = l + 5                   # " (-x)" where short == 'x'
+            if l > max_opt:
+                max_opt = l
+
+        opt_width = max_opt + 2 + 2 + 2     # room for indent + dashes + gutter
+
+        # Typical help block looks like this:
+        #   --foo       controls foonabulation
+        # Help block for longest option looks like this:
+        #   --flimflam  set the flim-flam level
+        # and with wrapped text:
+        #   --flimflam  set the flim-flam level (must be between
+        #               0 and 100, except on Tuesdays)
+        # Options with short names will have the short name shown (but
+        # it doesn't contribute to max_opt):
+        #   --foo (-f)  controls foonabulation
+        # If adding the short option would make the left column too wide,
+        # we push the explanation off to the next line
+        #   --flimflam (-l)
+        #               set the flim-flam level
+        # Important parameters:
+        #   - 2 spaces before option block start lines
+        #   - 2 dashes for each long option name
+        #   - min. 2 spaces between option and explanation (gutter)
+        #   - 5 characters (incl. space) for short option name
+
+        # Now generate lines of help text.  (If 80 columns were good enough
+        # for Jesus, then 78 columns are good enough for me!)
+        line_width = 78
+        text_width = line_width - opt_width
+        big_indent = ' ' * opt_width
+        if header:
+            lines = [header]
+        else:
+            lines = ['Option summary:']
+
+        for option in self.option_table:
+            long, short, help = option[:3]
+            text = wrap_text(help, text_width)
+            if long[-1] == '=':
+                long = long[0:-1]
+
+            # Case 1: no short option at all (makes life easy)
+            if short is None:
+                if text:
+                    lines.append("  --%-*s  %s" % (max_opt, long, text[0]))
+                else:
+                    lines.append("  --%-*s  " % (max_opt, long))
+
+            # Case 2: we have a short option, so we have to include it
+            # just after the long option
+            else:
+                opt_names = "%s (-%s)" % (long, short)
+                if text:
+                    lines.append("  --%-*s  %s" %
+                                 (max_opt, opt_names, text[0]))
+                else:
+                    lines.append("  --%-*s" % opt_names)
+
+            for l in text[1:]:
+                lines.append(big_indent + l)
+
+        # for self.option_table
+
+        return lines
+
+    # generate_help ()
+
+    def print_help (self, header=None, file=None):
+        if file is None:
+            file = sys.stdout
+        for line in self.generate_help(header):
+            file.write(line + "\n")
+
+# class FancyGetopt
+
+
def fancy_getopt (options, negative_opt, object, args):
    """Convenience wrapper: build a FancyGetopt parser for 'options',
    install 'negative_opt' as its negative aliases, and parse 'args'
    into attributes of 'object' (see FancyGetopt.getopt)."""
    getopt_parser = FancyGetopt(options)
    getopt_parser.set_negative_aliases(negative_opt)
    return getopt_parser.getopt(args, object)
+
+
# Translation table (for string.translate) mapping every whitespace
# character to a plain space; used by wrap_text() to flatten tabs and
# newlines before re-wrapping.
WS_TRANS = string.maketrans(string.whitespace, ' ' * len(string.whitespace))
+
def wrap_text (text, width):
    """wrap_text(text : string, width : int) -> [string]

    Split 'text' into multiple lines of no more than 'width' characters
    each, and return the list of strings that results.
    """

    if text is None:
        return []
    if len(text) <= width:
        return [text]

    # Normalize all whitespace to single spaces, then split the text into
    # "chunks": alternating runs of non-whitespace, spaces, and hyphens.
    # Lines may only be broken between chunks.
    text = string.expandtabs(text)
    text = string.translate(text, WS_TRANS)
    chunks = re.split(r'( +|-+)', text)
    chunks = filter(None, chunks)      # ' - ' results in empty strings
    lines = []

    while chunks:

        cur_line = []                   # list of chunks (to-be-joined)
        cur_len = 0                     # length of current line

        # Greedily pull whole chunks onto the current line while they fit.
        while chunks:
            l = len(chunks[0])
            if cur_len + l <= width:    # can squeeze (at least) this chunk in
                cur_line.append(chunks[0])
                del chunks[0]
                cur_len = cur_len + l
            else:                       # this line is full
                # drop last chunk if all space
                if cur_line and cur_line[-1][0] == ' ':
                    del cur_line[-1]
                break

        if chunks:                      # any chunks left to process?

            # if the current line is still empty, then we had a single
            # chunk that's too big to fit on a line -- so we break
            # down and break it up at the line width
            if cur_len == 0:
                cur_line.append(chunks[0][0:width])
                chunks[0] = chunks[0][width:]

            # all-whitespace chunks at the end of a line can be discarded
            # (and we know from the re.split above that if a chunk has
            # *any* whitespace, it is *all* whitespace)
            if chunks[0][0] == ' ':
                del chunks[0]

        # and store this line in the list-of-all-lines -- as a single
        # string, of course!
        lines.append(string.join(cur_line, ''))

    # while chunks

    return lines
+
+
def translate_longopt(opt):
    """Convert a long option name to a valid Python identifier by
    changing "-" to "_".
    """
    # str.replace is equivalent here to the module-level maketrans table,
    # which only maps '-' to '_'.
    return opt.replace('-', '_')
+
+
class OptionDummy:
    """Dummy class just used as a place to hold command-line option
    values as instance attributes."""

    def __init__ (self, options=[]):
        """Create a new OptionDummy instance.  The attributes listed in
        'options' will be initialized to None."""
        # Note: 'options' is only read, never mutated, so the shared
        # default list is safe here.
        for name in options:
            setattr(self, name, None)
diff --git a/lib/distutils/distutils/file_util.py b/lib/distutils/distutils/file_util.py
new file mode 100644
index 0000000..b9f0786
--- /dev/null
+++ b/lib/distutils/distutils/file_util.py
@@ -0,0 +1,231 @@
+"""distutils.file_util
+
+Utility functions for operating on single files.
+"""
+
+__revision__ = "$Id$"
+
+import os
+from distutils.errors import DistutilsFileError
+from distutils import log
+
# for generating verbose output in 'copy_file()'
# (keyed by copy_file's 'link' argument: None means a plain content copy)
_copy_action = {None: 'copying',
                'hard': 'hard linking',
                'sym': 'symbolically linking'}
+
+
+def _copy_file_contents(src, dst, buffer_size=16*1024):
+    """Copy the file 'src' to 'dst'.
+
+    Both must be filenames. Any error opening either file, reading from
+    'src', or writing to 'dst', raises DistutilsFileError.  Data is
+    read/written in chunks of 'buffer_size' bytes (default 16k).  No attempt
+    is made to handle anything apart from regular files.
+    """
+    # Stolen from shutil module in the standard library, but with
+    # custom error-handling added.
+    fsrc = None
+    fdst = None
+    try:
+        try:
+            fsrc = open(src, 'rb')
+        except os.error, (errno, errstr):
+            raise DistutilsFileError("could not open '%s': %s" % (src, errstr))
+
+        if os.path.exists(dst):
+            try:
+                os.unlink(dst)
+            except os.error, (errno, errstr):
+                raise DistutilsFileError(
+                      "could not delete '%s': %s" % (dst, errstr))
+
+        try:
+            fdst = open(dst, 'wb')
+        except os.error, (errno, errstr):
+            raise DistutilsFileError(
+                  "could not create '%s': %s" % (dst, errstr))
+
+        while 1:
+            try:
+                buf = fsrc.read(buffer_size)
+            except os.error, (errno, errstr):
+                raise DistutilsFileError(
+                      "could not read from '%s': %s" % (src, errstr))
+
+            if not buf:
+                break
+
+            try:
+                fdst.write(buf)
+            except os.error, (errno, errstr):
+                raise DistutilsFileError(
+                      "could not write to '%s': %s" % (dst, errstr))
+
+    finally:
+        if fdst:
+            fdst.close()
+        if fsrc:
+            fsrc.close()
+
def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0,
              link=None, verbose=1, dry_run=0):
    """Copy a file 'src' to 'dst'.

    If 'dst' is a directory, then 'src' is copied there with the same name;
    otherwise, it must be a filename.  (If the file exists, it will be
    ruthlessly clobbered.)  If 'preserve_mode' is true (the default),
    the file's mode (type and permission bits, or whatever is analogous on
    the current platform) is copied.  If 'preserve_times' is true (the
    default), the last-modified and last-access times are copied as well.
    If 'update' is true, 'src' will only be copied if 'dst' does not exist,
    or if 'dst' does exist but is older than 'src'.

    'link' allows you to make hard links (os.link) or symbolic links
    (os.symlink) instead of copying: set it to "hard" or "sym"; if it is
    None (the default), files are copied.  Don't set 'link' on systems that
    don't support it: 'copy_file()' doesn't check if hard or symbolic
    linking is available.

    File contents are copied with '_copy_file_contents()'; mode and times
    are then optionally preserved.  (Mode/time preservation only applies
    when actually copying, not when linking.)

    Return a tuple (dest_name, copied): 'dest_name' is the actual name of
    the output file, and 'copied' is true if the file was copied (or would
    have been copied, if 'dry_run' true).
    """
    # XXX if the destination file already exists, we clobber it if
    # copying, but blow up if linking.  Hmmm.  And I don't know what
    # macostools.copyfile() does.  Should definitely be consistent, and
    # should probably blow up if destination exists and we would be
    # changing it (ie. it's not already a hard/soft link to src OR
    # (not update) and (src newer than dst).

    from distutils.dep_util import newer
    from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE

    if not os.path.isfile(src):
        raise DistutilsFileError(
              "can't copy '%s': doesn't exist or not a regular file" % src)

    # 'dir' is only needed for the abbreviated log message below.
    if os.path.isdir(dst):
        dir = dst
        dst = os.path.join(dst, os.path.basename(src))
    else:
        dir = os.path.dirname(dst)

    if update and not newer(src, dst):
        if verbose >= 1:
            log.debug("not copying %s (output up-to-date)", src)
        return dst, 0

    try:
        action = _copy_action[link]
    except KeyError:
        raise ValueError("invalid value '%s' for 'link' argument" % link)

    if verbose >= 1:
        if os.path.basename(dst) == os.path.basename(src):
            log.info("%s %s -> %s", action, src, dir)
        else:
            log.info("%s %s -> %s", action, src, dst)

    if dry_run:
        return (dst, 1)

    # If linking (hard or symbolic), use the appropriate system call
    # (Unix only, of course, but that's the caller's responsibility)
    if link == 'hard':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.link(src, dst)
    elif link == 'sym':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.symlink(src, dst)

    # Otherwise (not linking), copy the file contents and
    # (optionally) copy the times and mode.
    else:
        _copy_file_contents(src, dst)
        if preserve_mode or preserve_times:
            st = os.stat(src)

            # According to David Ascher <da@ski.org>, utime() should be done
            # before chmod() (at least under NT).
            if preserve_times:
                os.utime(dst, (st[ST_ATIME], st[ST_MTIME]))
            if preserve_mode:
                os.chmod(dst, S_IMODE(st[ST_MODE]))

    return (dst, 1)
+
+# XXX I suspect this is Unix-specific -- need porting help!
def move_file (src, dst, verbose=1, dry_run=0):
    """Move a file 'src' to 'dst'.

    If 'dst' is a directory, the file will be moved into it with the same
    name; otherwise, 'src' is just renamed to 'dst'.  Return the new
    full name of the file.

    Handles cross-device moves on Unix using 'copy_file()'.  What about
    other systems???
    """
    import errno

    if verbose >= 1:
        log.info("moving %s -> %s", src, dst)

    if dry_run:
        return dst

    if not os.path.isfile(src):
        raise DistutilsFileError("can't move '%s': not a regular file" % src)

    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    elif os.path.exists(dst):
        raise DistutilsFileError(
              "can't move '%s': destination '%s' already exists" %
              (src, dst))

    if not os.path.isdir(os.path.dirname(dst)):
        raise DistutilsFileError(
              "can't move '%s': destination '%s' not a valid path" % \
              (src, dst))

    # Try a plain rename first; fall back to copy+delete only when the
    # rename fails because src and dst are on different filesystems.
    must_copy = False
    try:
        os.rename(src, dst)
    except os.error as exc:
        (num, msg) = exc.args
        if num == errno.EXDEV:
            must_copy = True
        else:
            raise DistutilsFileError(
                  "couldn't move '%s' to '%s': %s" % (src, dst, msg))

    if must_copy:
        copy_file(src, dst, verbose=verbose)
        try:
            os.unlink(src)
        except os.error as exc:
            (num, msg) = exc.args
            # Undo the half-finished move so we don't leave two copies of
            # the file lying around.
            try:
                os.unlink(dst)
            except os.error:
                pass
            raise DistutilsFileError(
                  ("couldn't move '%s' to '%s' by copy/delete: " +
                   "delete '%s' failed: %s") %
                  (src, dst, src, msg))
    return dst
+
+
def write_file (filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.
    """
    out = open(filename, "w")
    try:
        # Append the newline here so callers supply bare lines.
        out.writelines(line + "\n" for line in contents)
    finally:
        out.close()
diff --git a/lib/distutils/distutils/filelist.py b/lib/distutils/distutils/filelist.py
new file mode 100644
index 0000000..4aac6d3
--- /dev/null
+++ b/lib/distutils/distutils/filelist.py
@@ -0,0 +1,336 @@
+"""distutils.filelist
+
+Provides the FileList class, used for poking about the filesystem
+and building lists of files.
+"""
+
+__revision__ = "$Id$"
+
+import os, re
+import fnmatch
+from distutils.util import convert_path
+from distutils.errors import DistutilsTemplateError, DistutilsInternalError
+from distutils import log
+
+class FileList:
+    """A list of files built by on exploring the filesystem and filtered by
+    applying various patterns to what we find there.
+
+    Instance attributes:
+      dir
+        directory from which files will be taken -- only used if
+        'allfiles' not supplied to constructor
+      files
+        list of filenames currently being built/filtered/manipulated
+      allfiles
+        complete list of files under consideration (ie. without any
+        filtering applied)
+    """
+
+    def __init__(self, warn=None, debug_print=None):
+        # ignore argument to FileList, but keep them for backwards
+        # compatibility
+        self.allfiles = None
+        self.files = []
+
    def set_allfiles(self, allfiles):
        """Directly supply the universe of files under consideration,
        instead of discovering it with findall()."""
        self.allfiles = allfiles
+
    def findall(self, dir=os.curdir):
        # Record everything under 'dir' as the universe of files.
        # Delegates to the module-level findall() function (defined
        # elsewhere in this module), which this method name shadows
        # as a class attribute.
        self.allfiles = findall(dir)
+
    def debug_print(self, msg):
        """Print 'msg' to stdout if the global DEBUG (taken from the
        DISTUTILS_DEBUG environment variable) flag is true.
        """
        # Imported at call time so the current value of the flag is used.
        from distutils.debug import DEBUG
        if DEBUG:
            print msg
+
+    # -- List-like methods ---------------------------------------------
+
    def append(self, item):
        """Add a single filename to the current list."""
        self.files.append(item)
+
    def extend(self, items):
        """Add a sequence of filenames to the current list."""
        self.files.extend(items)
+
+    def sort(self):
+        # Not a strict lexical sort!
+        sortable_files = map(os.path.split, self.files)
+        sortable_files.sort()
+        self.files = []
+        for sort_tuple in sortable_files:
+            self.files.append(os.path.join(*sort_tuple))
+
+
+    # -- Other miscellaneous utility methods ---------------------------
+
+    def remove_duplicates(self):
+        # Assumes list has been sorted!
+        for i in range(len(self.files) - 1, 0, -1):
+            if self.files[i] == self.files[i - 1]:
+                del self.files[i]
+
+
+    # -- "File template" methods ---------------------------------------
+
+    def _parse_template_line(self, line):
+        words = line.split()
+        action = words[0]
+
+        patterns = dir = dir_pattern = None
+
+        if action in ('include', 'exclude',
+                      'global-include', 'global-exclude'):
+            if len(words) < 2:
+                raise DistutilsTemplateError, \
+                      "'%s' expects <pattern1> <pattern2> ..." % action
+
+            patterns = map(convert_path, words[1:])
+
+        elif action in ('recursive-include', 'recursive-exclude'):
+            if len(words) < 3:
+                raise DistutilsTemplateError, \
+                      "'%s' expects <dir> <pattern1> <pattern2> ..." % action
+
+            dir = convert_path(words[1])
+            patterns = map(convert_path, words[2:])
+
+        elif action in ('graft', 'prune'):
+            if len(words) != 2:
+                raise DistutilsTemplateError, \
+                     "'%s' expects a single <dir_pattern>" % action
+
+            dir_pattern = convert_path(words[1])
+
+        else:
+            raise DistutilsTemplateError, "unknown action '%s'" % action
+
+        return (action, patterns, dir, dir_pattern)
+
    def process_template_line(self, line):
        """Process one line of a manifest template: parse it with
        '_parse_template_line()' and apply the resulting action to
        'self.files' via 'self.include_pattern()' / 'self.exclude_pattern()'
        (defined later in this class), warning through distutils.log
        whenever a pattern matches nothing."""
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words.  'action' is always
        # defined: it's the first word of the line.  Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        action, patterns, dir, dir_pattern = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            self.debug_print("include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=1):
                    log.warn("warning: no files found matching '%s'",
                             pattern)

        elif action == 'exclude':
            self.debug_print("exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=1):
                    log.warn(("warning: no previously-included files "
                              "found matching '%s'"), pattern)

        elif action == 'global-include':
            self.debug_print("global-include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=0):
                    log.warn(("warning: no files found matching '%s' " +
                              "anywhere in distribution"), pattern)

        elif action == 'global-exclude':
            self.debug_print("global-exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=0):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found anywhere in distribution"),
                             pattern)

        elif action == 'recursive-include':
            self.debug_print("recursive-include %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.include_pattern(pattern, prefix=dir):
                    log.warn(("warning: no files found matching '%s' " +
                                "under directory '%s'"),
                             pattern, dir)

        elif action == 'recursive-exclude':
            self.debug_print("recursive-exclude %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, prefix=dir):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found under directory '%s'"),
                             pattern, dir)

        elif action == 'graft':
            self.debug_print("graft " + dir_pattern)
            if not self.include_pattern(None, prefix=dir_pattern):
                log.warn("warning: no directories found matching '%s'",
                         dir_pattern)

        elif action == 'prune':
            self.debug_print("prune " + dir_pattern)
            if not self.exclude_pattern(None, prefix=dir_pattern):
                log.warn(("no previously-included directories found " +
                          "matching '%s'"), dir_pattern)
        else:
            # _parse_template_line() already validated the action, so this
            # branch is unreachable unless the two methods fall out of sync.
            raise DistutilsInternalError, \
                  "this cannot happen: invalid action '%s'" % action
+
+    # -- Filtering/selection methods -----------------------------------
+
+    def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
+        """Select strings (presumably filenames) from 'self.files' that
+        match 'pattern', a Unix-style wildcard (glob) pattern.
+
+        Patterns are not quite the same as implemented by the 'fnmatch'
+        module: '*' and '?'  match non-special characters, where "special"
+        is platform-dependent: slash on Unix; colon, slash, and backslash on
+        DOS/Windows; and colon on Mac OS.
+
+        If 'anchor' is true (the default), then the pattern match is more
+        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
+        'anchor' is false, both of these will match.
+
+        If 'prefix' is supplied, then only filenames starting with 'prefix'
+        (itself a pattern) and ending with 'pattern', with anything in between
+        them, will match.  'anchor' is ignored in this case.
+
+        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
+        'pattern' is assumed to be either a string containing a regex or a
+        regex object -- no translation is done, the regex is just compiled
+        and used as-is.
+
+        Selected strings will be added to self.files.
+
+        Return 1 if files are found.
+        """
+        # Int flags (1/0), not bools -- kept for upstream compatibility.
+        files_found = 0
+        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
+        self.debug_print("include_pattern: applying regex r'%s'" %
+                         pattern_re.pattern)
+
+        # delayed loading of allfiles list
+        # (presumably self.findall() populates self.allfiles by walking the
+        # source tree -- the method is defined elsewhere in this class)
+        if self.allfiles is None:
+            self.findall()
+
+        for name in self.allfiles:
+            # search(), not match(): anchoring is encoded in the regex itself.
+            if pattern_re.search(name):
+                self.debug_print(" adding " + name)
+                self.files.append(name)
+                files_found = 1
+
+        return files_found
+
+
+    def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
+        """Remove strings (presumably filenames) from 'files' that match
+        'pattern'.
+
+        Other parameters are the same as for 'include_pattern()', above.
+        The list 'self.files' is modified in place. Return 1 if files are
+        found.
+        """
+        files_found = 0
+        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
+        self.debug_print("exclude_pattern: applying regex r'%s'" %
+                         pattern_re.pattern)
+        # Walk backwards so deleting by index doesn't shift the entries
+        # that are still to be visited.
+        for i in range(len(self.files)-1, -1, -1):
+            if pattern_re.search(self.files[i]):
+                self.debug_print(" removing " + self.files[i])
+                del self.files[i]
+                files_found = 1
+
+        return files_found
+
+
+# ----------------------------------------------------------------------
+# Utility functions
+
+def findall(dir = os.curdir):
+    """Find all files under 'dir' and return the list of full filenames
+    (relative to 'dir').
+
+    Performs an iterative (stack-based) walk; only regular files are
+    returned, and directories are recursed into.
+    """
+    # NOTE(review): 'dir' and 'list' shadow builtins; kept as-is for
+    # byte-compatibility with upstream distutils.
+    from stat import ST_MODE, S_ISREG, S_ISDIR, S_ISLNK
+
+    list = []
+    stack = [dir]
+    pop = stack.pop
+    push = stack.append
+
+    while stack:
+        dir = pop()
+        names = os.listdir(dir)
+
+        for name in names:
+            if dir != os.curdir:        # avoid the dreaded "./" syndrome
+                fullname = os.path.join(dir, name)
+            else:
+                fullname = name
+
+            # Avoid excess stat calls -- just one will do, thank you!
+            stat = os.stat(fullname)
+            mode = stat[ST_MODE]
+            if S_ISREG(mode):
+                list.append(fullname)
+            # NOTE(review): os.stat() follows symlinks, so S_ISLNK(mode) is
+            # presumably never true here; os.lstat() would be needed to
+            # actually skip symlinked directories -- upstream quirk.
+            elif S_ISDIR(mode) and not S_ISLNK(mode):
+                push(fullname)
+
+    return list
+
+
+def glob_to_re(pattern):
+    """Translate a shell-like glob pattern to a regular expression.
+
+    Return a string containing the regex.  Differs from
+    'fnmatch.translate()' in that '*' does not match "special characters"
+    (which are platform-specific).
+    """
+    pattern_re = fnmatch.translate(pattern)
+
+    # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
+    # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
+    # and by extension they shouldn't match such "special characters" under
+    # any OS.  So change all non-escaped dots in the RE to match any
+    # character except the special characters.
+    # XXX currently the "special characters" are just slash -- i.e. this is
+    # Unix-only.
+    # The lookbehind + (\\\\)* group matches an even run of backslashes,
+    # so a dot preceded by an odd number (i.e. an escaped literal '.')
+    # is left untouched.
+    pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', r'\1[^/]', pattern_re)
+
+    return pattern_re
+
+
+def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0):
+    """Translate a shell-like wildcard pattern to a compiled regular
+    expression.
+
+    Return the compiled regex.  If 'is_regex' true,
+    then 'pattern' is directly compiled to a regex (if it's a string)
+    or just returned as-is (assumes it's a regex object).
+    """
+    if is_regex:
+        if isinstance(pattern, str):
+            return re.compile(pattern)
+        else:
+            return pattern
+
+    # pattern may be None/empty (e.g. for 'graft'/'prune'), in which case
+    # only the prefix (or nothing) constrains the match.
+    if pattern:
+        pattern_re = glob_to_re(pattern)
+    else:
+        pattern_re = ''
+
+    if prefix is not None:
+        # ditch end of pattern character
+        # glob_to_re('') yields just the end-of-string suffix that fnmatch
+        # appends; slicing that many chars off the prefix regex lets it be
+        # joined with ".*" + pattern_re below.
+        empty_pattern = glob_to_re('')
+        prefix_re = glob_to_re(prefix)[:-len(empty_pattern)]
+        pattern_re = "^" + os.path.join(prefix_re, ".*" + pattern_re)
+    else:                               # no prefix -- respect anchor flag
+        if anchor:
+            pattern_re = "^" + pattern_re
+
+    return re.compile(pattern_re)
diff --git a/lib/distutils/distutils/log.py b/lib/distutils/distutils/log.py
new file mode 100644
index 0000000..7588570
--- /dev/null
+++ b/lib/distutils/distutils/log.py
@@ -0,0 +1,71 @@
+"""A simple log mechanism styled after PEP 282."""
+
+# The class here is styled after PEP 282 so that it could later be
+# replaced with a standard Python logging implementation.
+
+# Severity levels, ordered so that a higher number means more severe;
+# Log compares them numerically against its threshold.
+DEBUG = 1
+INFO = 2
+WARN = 3
+ERROR = 4
+FATAL = 5
+
+import sys
+
+class Log:
+    """Minimal PEP 282-style logger.
+
+    Messages at or above 'threshold' are written to stdout (DEBUG, INFO)
+    or stderr (WARN, ERROR, FATAL); everything below is discarded.
+    """
+
+    def __init__(self, threshold=WARN):
+        # Minimum level that will actually be emitted.
+        self.threshold = threshold
+
+    def _log(self, level, msg, args):
+        """Format and emit 'msg' if 'level' passes the threshold."""
+        # Reject unknown levels early rather than silently mis-routing them.
+        if level not in (DEBUG, INFO, WARN, ERROR, FATAL):
+            raise ValueError('%s wrong log level' % str(level))
+
+        if level >= self.threshold:
+            if args:
+                # Lazy %-formatting: args are only interpolated when the
+                # message is actually emitted.
+                msg = msg % args
+            if level in (WARN, ERROR, FATAL):
+                stream = sys.stderr
+            else:
+                stream = sys.stdout
+            stream.write('%s\n' % msg)
+            stream.flush()
+
+    def log(self, level, msg, *args):
+        """Log 'msg' at an explicit 'level'."""
+        self._log(level, msg, args)
+
+    def debug(self, msg, *args):
+        self._log(DEBUG, msg, args)
+
+    def info(self, msg, *args):
+        self._log(INFO, msg, args)
+
+    def warn(self, msg, *args):
+        self._log(WARN, msg, args)
+
+    def error(self, msg, *args):
+        self._log(ERROR, msg, args)
+
+    def fatal(self, msg, *args):
+        self._log(FATAL, msg, args)
+
+# A single shared logger; the names below are its bound methods, giving a
+# module-level logging API (log.warn(...), log.debug(...), etc.).
+_global_log = Log()
+log = _global_log.log
+debug = _global_log.debug
+info = _global_log.info
+warn = _global_log.warn
+error = _global_log.error
+fatal = _global_log.fatal
+
+def set_threshold(level):
+    """Set the global logging threshold; return the previous value."""
+    # return the old threshold for use from tests
+    old = _global_log.threshold
+    _global_log.threshold = level
+    return old
+
+def set_verbosity(v):
+    """Map a verbosity count to a threshold: 0 -> WARN, 1 -> INFO, 2+ -> DEBUG."""
+    if v <= 0:
+        set_threshold(WARN)
+    elif v == 1:
+        set_threshold(INFO)
+    elif v >= 2:
+        set_threshold(DEBUG)
diff --git a/lib/distutils/distutils/msvc9compiler.py b/lib/distutils/distutils/msvc9compiler.py
new file mode 100644
index 0000000..bf85ac7
--- /dev/null
+++ b/lib/distutils/distutils/msvc9compiler.py
@@ -0,0 +1,764 @@
+"""distutils.msvc9compiler
+
+Contains MSVCCompiler, an implementation of the abstract CCompiler class
+for the Microsoft Visual Studio 2008.
+
+The module is compatible with VS 2005 and VS 2008. You can find legacy support
+for older versions of VS in distutils.msvccompiler.
+"""
+
+# Written by Perry Stoll
+# hacked by Robin Becker and Thomas Heller to do a better job of
+#   finding DevStudio (through the registry)
+# ported to VS2005 and VS 2008 by Christian Heimes
+
+__revision__ = "$Id$"
+
+import os
+import subprocess
+import sys
+import re
+
+from distutils.errors import (DistutilsExecError, DistutilsPlatformError,
+                              CompileError, LibError, LinkError)
+from distutils.ccompiler import CCompiler, gen_lib_options
+from distutils import log
+from distutils.util import get_platform
+
+import _winreg
+
+# Short aliases for the _winreg API used throughout this module.
+RegOpenKeyEx = _winreg.OpenKeyEx
+RegEnumKey = _winreg.EnumKey
+RegEnumValue = _winreg.EnumValue
+RegError = _winreg.error
+
+# Registry hives searched, in order, when looking up a value.
+HKEYS = (_winreg.HKEY_USERS,
+         _winreg.HKEY_CURRENT_USER,
+         _winreg.HKEY_LOCAL_MACHINE,
+         _winreg.HKEY_CLASSES_ROOT)
+
+# True when running a 64-bit Python on Windows (pointer width test).
+NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32)
+if NATIVE_WIN64:
+    # Visual C++ is a 32-bit application, so we need to look in
+    # the corresponding registry branch, if we're running a
+    # 64-bit Python on Win64
+    VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f"
+    VSEXPRESS_BASE = r"Software\Wow6432Node\Microsoft\VCExpress\%0.1f"
+    WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows"
+    NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework"
+else:
+    VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f"
+    VSEXPRESS_BASE = r"Software\Microsoft\VCExpress\%0.1f"
+    WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows"
+    NET_BASE = r"Software\Microsoft\.NETFramework"
+
+# A map keyed by get_platform() return values to values accepted by
+# 'vcvarsall.bat'.  Note a cross-compile may combine these (eg, 'x86_amd64' is
+# the param to cross-compile on x86 targetting amd64.)
+PLAT_TO_VCVARS = {
+    'win32' : 'x86',
+    'win-amd64' : 'amd64',
+    'win-ia64' : 'ia64',
+}
+
+class Reg:
+    """Helper class to read values from the registry
+
+    All methods are class/static methods, bound via the pre-decorator
+    assignment style (``name = classmethod(name)``) used throughout
+    this module.
+    """
+
+    def get_value(cls, path, key):
+        """Return the value of 'key' under 'path', searching each hive in
+        HKEYS in order; raise KeyError if no hive has it."""
+        for base in HKEYS:
+            d = cls.read_values(base, path)
+            if d and key in d:
+                return d[key]
+        raise KeyError(key)
+    get_value = classmethod(get_value)
+
+    def read_keys(cls, base, key):
+        """Return list of registry keys."""
+        try:
+            handle = RegOpenKeyEx(base, key)
+        except RegError:
+            return None
+        L = []
+        i = 0
+        # RegEnumKey raises when the index runs past the last subkey.
+        while True:
+            try:
+                k = RegEnumKey(handle, i)
+            except RegError:
+                break
+            L.append(k)
+            i += 1
+        return L
+    read_keys = classmethod(read_keys)
+
+    def read_values(cls, base, key):
+        """Return dict of registry keys and values.
+
+        All names are converted to lowercase.
+        """
+        try:
+            handle = RegOpenKeyEx(base, key)
+        except RegError:
+            # Key doesn't exist (or isn't readable) in this hive.
+            return None
+        d = {}
+        i = 0
+        while True:
+            try:
+                name, value, type = RegEnumValue(handle, i)
+            except RegError:
+                break
+            name = name.lower()
+            d[cls.convert_mbcs(name)] = cls.convert_mbcs(value)
+            i += 1
+        return d
+    read_values = classmethod(read_values)
+
+    def convert_mbcs(s):
+        """Decode 's' with the 'mbcs' codec when it has a decode method;
+        return other objects (and undecodable strings) unchanged."""
+        dec = getattr(s, "decode", None)
+        if dec is not None:
+            try:
+                s = dec("mbcs")
+            except UnicodeError:
+                pass
+        return s
+    convert_mbcs = staticmethod(convert_mbcs)
+
+class MacroExpander:
+    """Expand "$(Name)" macros (VCInstallDir, FrameworkDir, ...) in strings,
+    with the macro values read from the Windows registry at construction."""
+
+    def __init__(self, version):
+        self.macros = {}
+        self.vsbase = VS_BASE % version
+        self.load_macros(version)
+
+    def set_macro(self, macro, path, key):
+        """Register "$(macro)" with the registry value at path/key."""
+        self.macros["$(%s)" % macro] = Reg.get_value(path, key)
+
+    def load_macros(self, version):
+        """Populate self.macros for the given VS 'version' (a float)."""
+        self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
+        self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir")
+        self.set_macro("FrameworkDir", NET_BASE, "installroot")
+        try:
+            if version >= 8.0:
+                self.set_macro("FrameworkSDKDir", NET_BASE,
+                               "sdkinstallrootv2.0")
+            else:
+                # Older versions are unsupported; fall through to the
+                # DistutilsPlatformError below.
+                raise KeyError("sdkinstallrootv2.0")
+        except KeyError:
+            raise DistutilsPlatformError(
+            """Python was built with Visual Studio 2008;
+extensions must be built with a compiler than can generate compatible binaries.
+Visual Studio 2008 was not found on this system. If you have Cygwin installed,
+you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")
+
+        if version >= 9.0:
+            self.set_macro("FrameworkVersion", self.vsbase, "clr version")
+            self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder")
+        else:
+            p = r"Software\Microsoft\NET Framework Setup\Product"
+            for base in HKEYS:
+                try:
+                    h = RegOpenKeyEx(base, p)
+                except RegError:
+                    continue
+                key = RegEnumKey(h, 0)
+                # NOTE(review): Reg.get_value's signature is (path, key);
+                # passing (base, subkey) here looks inconsistent with the
+                # other call sites -- upstream quirk in this pre-9.0 branch,
+                # kept as-is for byte-compatibility.
+                d = Reg.get_value(base, r"%s\%s" % (p, key))
+                self.macros["$(FrameworkVersion)"] = d["version"]
+
+    def sub(self, s):
+        """Return 's' with every known "$(Name)" macro replaced."""
+        for k, v in self.macros.items():
+            s = s.replace(k, v)
+        return s
+
+def get_build_version():
+    """Return the version of MSVC that was used to build Python.
+
+    For Python 2.3 and up, the version number is included in
+    sys.version.  For earlier versions, assume the compiler is MSVC 6.
+    """
+    prefix = "MSC v."
+    i = sys.version.find(prefix)
+    if i == -1:
+        return 6
+    i = i + len(prefix)
+    s, rest = sys.version[i:].split(" ", 1)
+    # e.g. "MSC v.1500" gives s == "1500": the major version is the leading
+    # digits minus 6 (15 - 6 = 9 for VS2008), the minor is the third
+    # digit / 10.
+    majorVersion = int(s[:-2]) - 6
+    minorVersion = int(s[2:3]) / 10.0
+    # I don't think paths are affected by minor version in version 6
+    if majorVersion == 6:
+        minorVersion = 0
+    if majorVersion >= 6:
+        return majorVersion + minorVersion
+    # else we don't know what version of the compiler this is
+    return None
+
+def normalize_and_reduce_paths(paths):
+    """Return a list of normalized paths with duplicates removed.
+
+    The current order of paths is maintained.
+    """
+    # Paths are normalized so things like:  /a and /a/ aren't both preserved.
+    reduced_paths = []
+    for p in paths:
+        np = os.path.normpath(p)
+        # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
+        if np not in reduced_paths:
+            reduced_paths.append(np)
+    return reduced_paths
+
+def removeDuplicates(variable):
+    """Remove duplicate values of an environment variable.
+
+    'variable' is an os.pathsep-separated string; the order of first
+    occurrence is preserved in the returned string.
+    """
+    oldList = variable.split(os.pathsep)
+    newList = []
+    for i in oldList:
+        if i not in newList:
+            newList.append(i)
+    newVariable = os.pathsep.join(newList)
+    return newVariable
+
+def find_vcvarsall(version):
+    """Find the vcvarsall.bat file
+
+    At first it tries to find the productdir of VS 2008 in the registry. If
+    that fails it falls back to the VS90COMNTOOLS env var.
+
+    Returns the full path to vcvarsall.bat, or None if it can't be found.
+    """
+    vsbase = VS_BASE % version
+    try:
+        productdir = Reg.get_value(r"%s\Setup\VC" % vsbase,
+                                   "productdir")
+    except KeyError:
+        productdir = None
+
+    # trying Express edition
+    if productdir is None:
+        vsbase = VSEXPRESS_BASE % version
+        try:
+            productdir = Reg.get_value(r"%s\Setup\VC" % vsbase,
+                                       "productdir")
+        except KeyError:
+            productdir = None
+            log.debug("Unable to find productdir in registry")
+
+    if not productdir or not os.path.isdir(productdir):
+        # "%0.f" drops the fraction: 9.0 -> "VS90COMNTOOLS".
+        toolskey = "VS%0.f0COMNTOOLS" % version
+        toolsdir = os.environ.get(toolskey, None)
+
+        if toolsdir and os.path.isdir(toolsdir):
+            # The tools dir is <VS>\Common7\Tools; VC is two levels up.
+            productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
+            productdir = os.path.abspath(productdir)
+            if not os.path.isdir(productdir):
+                log.debug("%s is not a valid directory" % productdir)
+                return None
+        else:
+            log.debug("Env var %s is not set or invalid" % toolskey)
+    if not productdir:
+        log.debug("No productdir found")
+        return None
+    vcvarsall = os.path.join(productdir, "vcvarsall.bat")
+    if os.path.isfile(vcvarsall):
+        return vcvarsall
+    log.debug("Unable to find vcvarsall.bat")
+    return None
+
+def query_vcvarsall(version, arch="x86"):
+    """Launch vcvarsall.bat and read the settings from its environment
+
+    Runs "vcvarsall.bat <arch> & set" in a subshell and scrapes the
+    resulting 'set' output for the include/lib/libpath/path variables.
+    Raises DistutilsPlatformError if vcvarsall.bat can't be found or
+    fails, and ValueError if any expected variable is missing.
+    """
+    vcvarsall = find_vcvarsall(version)
+    interesting = set(("include", "lib", "libpath", "path"))
+    result = {}
+
+    if vcvarsall is None:
+        raise DistutilsPlatformError("Unable to find vcvarsall.bat")
+    log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version)
+    popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch),
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+    try:
+        stdout, stderr = popen.communicate()
+        if popen.wait() != 0:
+            raise DistutilsPlatformError(stderr.decode("mbcs"))
+
+        stdout = stdout.decode("mbcs")
+        for line in stdout.split("\n"):
+            line = Reg.convert_mbcs(line)
+            # Only "NAME=value" lines from 'set' are of interest.
+            if '=' not in line:
+                continue
+            line = line.strip()
+            key, value = line.split('=', 1)
+            key = key.lower()
+            if key in interesting:
+                if value.endswith(os.pathsep):
+                    value = value[:-1]
+                result[key] = removeDuplicates(value)
+
+    finally:
+        popen.stdout.close()
+        popen.stderr.close()
+
+    # Sanity check: every interesting variable must have been captured.
+    if len(result) != len(interesting):
+        raise ValueError(str(list(result.keys())))
+
+    return result
+
+# More globals
+# Import-time guard: this module only supports VS2005 (8.0) and later, so
+# merely importing it on a Python built with an older MSVC raises.
+VERSION = get_build_version()
+if VERSION < 8.0:
+    raise DistutilsPlatformError("VC %0.1f is not supported by this module" % VERSION)
+# MACROS = MacroExpander(VERSION)
+
+class MSVCCompiler(CCompiler) :
+    """Concrete class that implements an interface to Microsoft Visual C++,
+       as defined by the CCompiler abstract class."""
+
+    compiler_type = 'msvc'
+
+    # Just set this so CCompiler's constructor doesn't barf.  We currently
+    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+    # as it really isn't necessary for this sort of single-compiler class.
+    # Would be nice to have a consistent interface with UnixCCompiler,
+    # though, so it's worth thinking about.
+    executables = {}
+
+    # Private class data (need to distinguish C from C++ source for compiler)
+    _c_extensions = ['.c']
+    _cpp_extensions = ['.cc', '.cpp', '.cxx']
+    _rc_extensions = ['.rc']
+    _mc_extensions = ['.mc']
+
+    # Needed for the filename generation methods provided by the
+    # base class, CCompiler.
+    src_extensions = (_c_extensions + _cpp_extensions +
+                      _rc_extensions + _mc_extensions)
+    res_extension = '.res'
+    obj_extension = '.obj'
+    static_lib_extension = '.lib'
+    # On Windows both static and import libraries use plain "name.lib".
+    shared_lib_extension = '.dll'
+    static_lib_format = shared_lib_format = '%s%s'
+    exe_extension = '.exe'
+
+    def __init__(self, verbose=0, dry_run=0, force=0):
+        """Set up bookkeeping only; real toolchain discovery is deferred
+        to initialize(), which runs lazily on first compile/link."""
+        CCompiler.__init__ (self, verbose, dry_run, force)
+        self.__version = VERSION
+        self.__root = r"Software\Microsoft\VisualStudio"
+        # self.__macros = MACROS
+        self.__paths = []
+        # target platform (.plat_name is consistent with 'bdist')
+        self.plat_name = None
+        self.__arch = None # deprecated name
+        self.initialized = False
+
+    def initialize(self, plat_name=None):
+        """Locate the MSVC toolchain and set compiler/linker executables
+        and option lists for 'plat_name' (defaults to the host platform).
+
+        When DISTUTILS_USE_SDK and MSSdk are both set and cl.exe is on
+        PATH, the existing SDK environment is trusted as-is; otherwise the
+        environment produced by vcvarsall.bat is queried and applied.
+        May only be called once per instance.
+        """
+        # multi-init means we would need to check platform same each time...
+        assert not self.initialized, "don't init multiple times"
+        if plat_name is None:
+            plat_name = get_platform()
+        # sanity check for platforms to prevent obscure errors later.
+        ok_plats = 'win32', 'win-amd64', 'win-ia64'
+        if plat_name not in ok_plats:
+            raise DistutilsPlatformError("--plat-name must be one of %s" %
+                                         (ok_plats,))
+
+        if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
+            # Assume that the SDK set up everything alright; don't try to be
+            # smarter
+            self.cc = "cl.exe"
+            self.linker = "link.exe"
+            self.lib = "lib.exe"
+            self.rc = "rc.exe"
+            self.mc = "mc.exe"
+        else:
+            # On x86, 'vcvars32.bat amd64' creates an env that doesn't work;
+            # to cross compile, you use 'x86_amd64'.
+            # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross
+            # compile use 'x86' (ie, it runs the x86 compiler directly)
+            # No idea how itanium handles this, if at all.
+            if plat_name == get_platform() or plat_name == 'win32':
+                # native build or cross-compile to win32
+                plat_spec = PLAT_TO_VCVARS[plat_name]
+            else:
+                # cross compile from win32 -> some 64bit
+                plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \
+                            PLAT_TO_VCVARS[plat_name]
+
+            vc_env = query_vcvarsall(VERSION, plat_spec)
+
+            # take care to only use strings in the environment.
+            self.__paths = vc_env['path'].encode('mbcs').split(os.pathsep)
+            os.environ['lib'] = vc_env['lib'].encode('mbcs')
+            os.environ['include'] = vc_env['include'].encode('mbcs')
+
+            if len(self.__paths) == 0:
+                # NOTE(review): self.__product is not assigned anywhere in
+                # the code visible here, so this error path presumably
+                # raises AttributeError instead -- upstream quirk; confirm
+                # against the rest of the class before relying on it.
+                raise DistutilsPlatformError("Python was built with %s, "
+                       "and extensions need to be built with the same "
+                       "version of the compiler, but it isn't installed."
+                       % self.__product)
+
+            self.cc = self.find_exe("cl.exe")
+            self.linker = self.find_exe("link.exe")
+            self.lib = self.find_exe("lib.exe")
+            self.rc = self.find_exe("rc.exe")   # resource compiler
+            self.mc = self.find_exe("mc.exe")   # message compiler
+            #self.set_path_env_var('lib')
+            #self.set_path_env_var('include')
+
+        # extend the MSVC path with the current path
+        try:
+            for p in os.environ['path'].split(';'):
+                self.__paths.append(p)
+        except KeyError:
+            pass
+        self.__paths = normalize_and_reduce_paths(self.__paths)
+        os.environ['path'] = ";".join(self.__paths)
+
+        self.preprocess_options = None
+        if self.__arch == "x86":
+            self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3',
+                                     '/DNDEBUG']
+            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
+                                          '/Z7', '/D_DEBUG']
+        else:
+            # Win64
+            self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' ,
+                                     '/DNDEBUG']
+            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
+                                          '/Z7', '/D_DEBUG']
+
+        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
+        if self.__version >= 7:
+            self.ldflags_shared_debug = [
+                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG', '/pdb:None'
+                ]
+        self.ldflags_static = [ '/nologo']
+
+        self.initialized = True
+
+    # -- Worker methods ------------------------------------------------
+
+    def object_filenames(self,
+                         source_filenames,
+                         strip_dir=0,
+                         output_dir=''):
+        """Map source filenames to the object filenames the build will
+        produce (.obj for C/C++, .res for .rc and .mc resources).
+
+        Raises CompileError for any extension not in src_extensions.
+        """
+        # Copied from ccompiler.py, extended to return .res as 'object'-file
+        # for .rc input file
+        if output_dir is None: output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            (base, ext) = os.path.splitext (src_name)
+            base = os.path.splitdrive(base)[1] # Chop off the drive
+            base = base[os.path.isabs(base):]  # If abs, chop off leading /
+            if ext not in self.src_extensions:
+                # Better to raise an exception instead of silently continuing
+                # and later complain about sources and targets having
+                # different lengths
+                raise CompileError ("Don't know how to compile %s" % src_name)
+            if strip_dir:
+                base = os.path.basename (base)
+            if ext in self._rc_extensions:
+                obj_names.append (os.path.join (output_dir,
+                                                base + self.res_extension))
+            elif ext in self._mc_extensions:
+                obj_names.append (os.path.join (output_dir,
+                                                base + self.res_extension))
+            else:
+                obj_names.append (os.path.join (output_dir,
+                                                base + self.obj_extension))
+        return obj_names
+
+
+    def compile(self, sources,
+                output_dir=None, macros=None, include_dirs=None, debug=0,
+                extra_preargs=None, extra_postargs=None, depends=None):
+        """Compile 'sources' to object files, returning the object list.
+
+        C/C++ files go through cl.exe; .rc files through rc.exe; .mc files
+        through mc.exe then rc.exe.  Raises CompileError on any failed
+        spawn or unrecognized source type.
+        """
+        if not self.initialized:
+            self.initialize()
+        compile_info = self._setup_compile(output_dir, macros, include_dirs,
+                                           sources, depends, extra_postargs)
+        macros, objects, extra_postargs, pp_opts, build = compile_info
+
+        compile_opts = extra_preargs or []
+        compile_opts.append ('/c')
+        if debug:
+            compile_opts.extend(self.compile_options_debug)
+        else:
+            compile_opts.extend(self.compile_options)
+
+        for obj in objects:
+            try:
+                src, ext = build[obj]
+            except KeyError:
+                # Object is up to date (not in the build map): skip it.
+                continue
+            if debug:
+                # pass the full pathname to MSVC in debug mode,
+                # this allows the debugger to find the source file
+                # without asking the user to browse for it
+                src = os.path.abspath(src)
+
+            if ext in self._c_extensions:
+                input_opt = "/Tc" + src
+            elif ext in self._cpp_extensions:
+                input_opt = "/Tp" + src
+            elif ext in self._rc_extensions:
+                # compile .RC to .RES file
+                input_opt = src
+                output_opt = "/fo" + obj
+                try:
+                    self.spawn([self.rc] + pp_opts +
+                               [output_opt] + [input_opt])
+                except DistutilsExecError, msg:
+                    raise CompileError(msg)
+                continue
+            elif ext in self._mc_extensions:
+                # Compile .MC to .RC file to .RES file.
+                #   * '-h dir' specifies the directory for the
+                #     generated include file
+                #   * '-r dir' specifies the target directory of the
+                #     generated RC file and the binary message resource
+                #     it includes
+                #
+                # For now (since there are no options to change this),
+                # we use the source-directory for the include file and
+                # the build directory for the RC file and message
+                # resources. This works at least for win32all.
+                h_dir = os.path.dirname(src)
+                rc_dir = os.path.dirname(obj)
+                try:
+                    # first compile .MC to .RC and .H file
+                    self.spawn([self.mc] +
+                               ['-h', h_dir, '-r', rc_dir] + [src])
+                    base, _ = os.path.splitext (os.path.basename (src))
+                    rc_file = os.path.join (rc_dir, base + '.rc')
+                    # then compile .RC to .RES file
+                    self.spawn([self.rc] +
+                               ["/fo" + obj] + [rc_file])
+
+                except DistutilsExecError, msg:
+                    raise CompileError(msg)
+                continue
+            else:
+                # how to handle this file?
+                raise CompileError("Don't know how to compile %s to %s"
+                                   % (src, obj))
+
+            # Common cl.exe invocation for the C/C++ branches above.
+            output_opt = "/Fo" + obj
+            try:
+                self.spawn([self.cc] + compile_opts + pp_opts +
+                           [input_opt, output_opt] +
+                           extra_postargs)
+            except DistutilsExecError, msg:
+                raise CompileError(msg)
+
+        return objects
+
+
+    def create_static_lib(self,
+                          objects,
+                          output_libname,
+                          output_dir=None,
+                          debug=0,
+                          target_lang=None):
+        """Archive 'objects' into a static library via lib.exe, skipping
+        the step when the output is already up to date.
+
+        Raises LibError if lib.exe fails.
+        """
+        if not self.initialized:
+            self.initialize()
+        (objects, output_dir) = self._fix_object_args(objects, output_dir)
+        output_filename = self.library_filename(output_libname,
+                                                output_dir=output_dir)
+
+        if self._need_link(objects, output_filename):
+            lib_args = objects + ['/OUT:' + output_filename]
+            if debug:
+                pass # XXX what goes here?
+            try:
+                self.spawn([self.lib] + lib_args)
+            except DistutilsExecError, msg:
+                raise LibError(msg)
+        else:
+            log.debug("skipping %s (up-to-date)", output_filename)
+
+
+    def link(self,
+             target_desc,
+             objects,
+             output_filename,
+             output_dir=None,
+             libraries=None,
+             library_dirs=None,
+             runtime_library_dirs=None,
+             export_symbols=None,
+             debug=0,
+             extra_preargs=None,
+             extra_postargs=None,
+             build_temp=None,
+             target_lang=None):
+        # Link 'objects' into an executable or DLL with link.exe, then
+        # embed the linker-generated manifest into the binary with mt.exe.
+        # 'target_desc' is one of the CCompiler.* target constants.
+        if not self.initialized:
+            self.initialize()
+        (objects, output_dir) = self._fix_object_args(objects, output_dir)
+        fixed_args = self._fix_lib_args(libraries, library_dirs,
+                                        runtime_library_dirs)
+        (libraries, library_dirs, runtime_library_dirs) = fixed_args
+
+        # MSVC has no rpath equivalent; warn instead of failing.
+        if runtime_library_dirs:
+            self.warn ("I don't know what to do with 'runtime_library_dirs': "
+                       + str (runtime_library_dirs))
+
+        lib_opts = gen_lib_options(self,
+                                   library_dirs, runtime_library_dirs,
+                                   libraries)
+        if output_dir is not None:
+            output_filename = os.path.join(output_dir, output_filename)
+
+        if self._need_link(objects, output_filename):
+            # Executables drop the leading '/DLL' flag from the shared
+            # flag lists; everything else links as a DLL.
+            if target_desc == CCompiler.EXECUTABLE:
+                if debug:
+                    ldflags = self.ldflags_shared_debug[1:]
+                else:
+                    ldflags = self.ldflags_shared[1:]
+            else:
+                if debug:
+                    ldflags = self.ldflags_shared_debug
+                else:
+                    ldflags = self.ldflags_shared
+
+            export_opts = []
+            for sym in (export_symbols or []):
+                export_opts.append("/EXPORT:" + sym)
+
+            ld_args = (ldflags + lib_opts + export_opts +
+                       objects + ['/OUT:' + output_filename])
+
+            # The MSVC linker generates .lib and .exp files, which cannot be
+            # suppressed by any linker switches. The .lib files may even be
+            # needed! Make sure they are generated in the temporary build
+            # directory. Since they have different names for debug and release
+            # builds, they can go into the same directory.
+            # NOTE: this overrides the 'build_temp' argument with the
+            # directory of the first object file.
+            build_temp = os.path.dirname(objects[0])
+            if export_symbols is not None:
+                (dll_name, dll_ext) = os.path.splitext(
+                    os.path.basename(output_filename))
+                implib_file = os.path.join(
+                    build_temp,
+                    self.library_filename(dll_name))
+                ld_args.append ('/IMPLIB:' + implib_file)
+
+            # Embedded manifests are recommended - see MSDN article titled
+            # "How to: Embed a Manifest Inside a C/C++ Application"
+            # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
+            # Ask the linker to generate the manifest in the temp dir, so
+            # we can embed it later.
+            temp_manifest = os.path.join(
+                    build_temp,
+                    os.path.basename(output_filename) + ".manifest")
+            ld_args.append('/MANIFESTFILE:' + temp_manifest)
+
+            if extra_preargs:
+                ld_args[:0] = extra_preargs
+            if extra_postargs:
+                ld_args.extend(extra_postargs)
+
+            self.mkpath(os.path.dirname(output_filename))
+            try:
+                self.spawn([self.linker] + ld_args)
+            except DistutilsExecError, msg:
+                raise LinkError(msg)
+
+            # embed the manifest
+            # XXX - this is somewhat fragile - if mt.exe fails, distutils
+            # will still consider the DLL up-to-date, but it will not have a
+            # manifest.  Maybe we should link to a temp file?  OTOH, that
+            # implies a build environment error that shouldn't go undetected.
+            # Manifest resource id: 1 for executables, 2 for DLLs (the
+            # Win32 convention mt.exe expects in '-outputresource').
+            if target_desc == CCompiler.EXECUTABLE:
+                mfid = 1
+            else:
+                mfid = 2
+                # DLLs: strip the CRT reference so it resolves through
+                # python.exe's own manifest instead.
+                self._remove_visual_c_ref(temp_manifest)
+            out_arg = '-outputresource:%s;%s' % (output_filename, mfid)
+            try:
+                self.spawn(['mt.exe', '-nologo', '-manifest',
+                            temp_manifest, out_arg])
+            except DistutilsExecError, msg:
+                raise LinkError(msg)
+        else:
+            log.debug("skipping %s (up-to-date)", output_filename)
+
+    def _remove_visual_c_ref(self, manifest_file):
+        # Rewrite 'manifest_file' in place, stripping the Microsoft.VCxx.CRT
+        # assemblyIdentity element (and any dependentAssembly left empty by
+        # that removal).  I/O errors are deliberately ignored: a missing or
+        # unreadable manifest is treated as "nothing to strip".
+        try:
+            # Remove references to the Visual C runtime, so they will
+            # fall through to the Visual C dependency of Python.exe.
+            # This way, when installed for a restricted user (e.g.
+            # runtimes are not in WinSxS folder, but in Python's own
+            # folder), the runtimes do not need to be in every folder
+            # with .pyd's.
+            manifest_f = open(manifest_file)
+            try:
+                manifest_buf = manifest_f.read()
+            finally:
+                manifest_f.close()
+            pattern = re.compile(
+                r"""<assemblyIdentity.*?name=("|')Microsoft\."""\
+                r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
+                re.DOTALL)
+            manifest_buf = re.sub(pattern, "", manifest_buf)
+            # NOTE(review): this pattern is not a raw string; '\s' is not a
+            # recognized string escape so it reaches re.sub intact, but a
+            # raw string would be safer upstream.
+            pattern = "<dependentAssembly>\s*</dependentAssembly>"
+            manifest_buf = re.sub(pattern, "", manifest_buf)
+            manifest_f = open(manifest_file, 'w')
+            try:
+                manifest_f.write(manifest_buf)
+            finally:
+                manifest_f.close()
+        except IOError:
+            pass
+
+    # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options() function, in
+    # ccompiler.py.
+
+    def library_dir_option(self, dir):
+        # Linker flag that adds 'dir' to the library search path.
+        return "/LIBPATH:" + dir
+
+    def runtime_library_dir_option(self, dir):
+        # MSVC has no runtime search path (rpath) concept; always raises.
+        raise DistutilsPlatformError(
+              "don't know how to set runtime library search path for MSVC++")
+
+    def library_option(self, lib):
+        # On MSVC a library is passed to the linker as its filename.
+        return self.library_filename(lib)
+
+
+    def find_library_file(self, dirs, lib, debug=0):
+        # Search 'dirs' for library 'lib'; return the full path of the
+        # first match, or None.  With debug=1, a '<lib>_d' debug build is
+        # preferred over the release name within each directory.
+        # Prefer a debugging library if found (and requested), but deal
+        # with it if we don't have one.
+        if debug:
+            try_names = [lib + "_d", lib]
+        else:
+            try_names = [lib]
+        for dir in dirs:
+            for name in try_names:
+                libfile = os.path.join(dir, self.library_filename (name))
+                if os.path.exists(libfile):
+                    return libfile
+        else:
+            # NOTE(review): this is a for/else — the 'else' runs whenever
+            # the loop finishes without 'break', which here is every
+            # non-returning path; it behaves like a plain 'return None'.
+            # Oops, didn't find it in *any* of 'dirs'
+            return None
+
+    # Helper methods for using the MSVC registry settings
+
+    def find_exe(self, exe):
+        """Return path to an MSVC executable program.
+
+        Tries to find the program in several places: first, one of the
+        MSVC program search paths from the registry; next, the directories
+        in the PATH environment variable.  If any of those work, return an
+        absolute path that is known to exist.  If none of them work, just
+        return the original program name, 'exe'.
+        """
+        # Registry-derived search paths take precedence over the
+        # environment PATH.
+        for p in self.__paths:
+            fn = os.path.join(os.path.abspath(p), exe)
+            if os.path.isfile(fn):
+                return fn
+
+        # didn't find it; try existing path
+        for p in os.environ['Path'].split(';'):
+            fn = os.path.join(os.path.abspath(p),exe)
+            if os.path.isfile(fn):
+                return fn
+
+        # Fall back to the bare name and let the spawned shell resolve it.
+        return exe
diff --git a/lib/distutils/distutils/msvccompiler.py b/lib/distutils/distutils/msvccompiler.py
new file mode 100644
index 0000000..0e69fd3
--- /dev/null
+++ b/lib/distutils/distutils/msvccompiler.py
@@ -0,0 +1,659 @@
+"""distutils.msvccompiler
+
+Contains MSVCCompiler, an implementation of the abstract CCompiler class
+for the Microsoft Visual Studio.
+"""
+
+# Written by Perry Stoll
+# hacked by Robin Becker and Thomas Heller to do a better job of
+#   finding DevStudio (through the registry)
+
+__revision__ = "$Id$"
+
+import sys
+import os
+import string
+
+from distutils.errors import (DistutilsExecError, DistutilsPlatformError,
+                              CompileError, LibError, LinkError)
+from distutils.ccompiler import CCompiler, gen_lib_options
+from distutils import log
+
+# Probe for a usable Windows-registry API.  Prefer the stdlib _winreg
+# module; fall back to the pywin32 win32api/win32con pair; otherwise
+# leave _can_read_reg false and warn, so get_msvc_paths() returns [].
+_can_read_reg = 0
+try:
+    import _winreg
+
+    _can_read_reg = 1
+    hkey_mod = _winreg
+
+    # Alias the registry functions so the rest of the module is
+    # independent of which backend was imported.
+    RegOpenKeyEx = _winreg.OpenKeyEx
+    RegEnumKey = _winreg.EnumKey
+    RegEnumValue = _winreg.EnumValue
+    RegError = _winreg.error
+
+except ImportError:
+    try:
+        import win32api
+        import win32con
+        _can_read_reg = 1
+        hkey_mod = win32con
+
+        RegOpenKeyEx = win32api.RegOpenKeyEx
+        RegEnumKey = win32api.RegEnumKey
+        RegEnumValue = win32api.RegEnumValue
+        RegError = win32api.error
+
+    except ImportError:
+        log.info("Warning: Can't read registry to find the "
+                 "necessary compiler setting\n"
+                 "Make sure that Python modules _winreg, "
+                 "win32api or win32con are installed.")
+        pass
+
+# Root hives searched, in order, by the registry helpers below.
+if _can_read_reg:
+    HKEYS = (hkey_mod.HKEY_USERS,
+             hkey_mod.HKEY_CURRENT_USER,
+             hkey_mod.HKEY_LOCAL_MACHINE,
+             hkey_mod.HKEY_CLASSES_ROOT)
+
+def read_keys(base, key):
+    """Return list of registry keys.
+
+    Returns None (not an empty list) when 'key' cannot be opened
+    under hive 'base'.
+    """
+
+    try:
+        handle = RegOpenKeyEx(base, key)
+    except RegError:
+        return None
+    L = []
+    i = 0
+    # RegEnumKey has no count API; enumerate until it raises.
+    while 1:
+        try:
+            k = RegEnumKey(handle, i)
+        except RegError:
+            break
+        L.append(k)
+        i = i + 1
+    return L
+
+def read_values(base, key):
+    """Return dict of registry keys and values.
+
+    All names are converted to lowercase.  Returns None (not an empty
+    dict) when 'key' cannot be opened under hive 'base'.
+    """
+    try:
+        handle = RegOpenKeyEx(base, key)
+    except RegError:
+        return None
+    d = {}
+    i = 0
+    # Enumerate until RegEnumValue runs off the end and raises.
+    while 1:
+        try:
+            name, value, type = RegEnumValue(handle, i)
+        except RegError:
+            break
+        name = name.lower()
+        # Normalize both names and values to byte strings (mbcs).
+        d[convert_mbcs(name)] = convert_mbcs(value)
+        i = i + 1
+    return d
+
+def convert_mbcs(s):
+    # Best-effort conversion of a (possibly unicode) registry value to an
+    # mbcs-encoded byte string; non-string values and values that fail to
+    # encode are returned unchanged.
+    enc = getattr(s, "encode", None)
+    if enc is not None:
+        try:
+            s = enc("mbcs")
+        except UnicodeError:
+            pass
+    return s
+
+class MacroExpander:
+    """Expand $(...) macros (VCInstallDir, FrameworkDir, etc.) in registry
+    path values, using values read from the Visual Studio registry keys."""
+
+    def __init__(self, version):
+        # Maps "$(Name)" -> expansion text.
+        self.macros = {}
+        self.load_macros(version)
+
+    def set_macro(self, macro, path, key):
+        # Record the first value of 'key' found for 'path' across HKEYS.
+        for base in HKEYS:
+            d = read_values(base, path)
+            if d:
+                self.macros["$(%s)" % macro] = d[key]
+                break
+
+    def load_macros(self, version):
+        # Populate the standard VS/.NET install-dir macros for 'version'.
+        vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
+        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
+        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
+        net = r"Software\Microsoft\.NETFramework"
+        self.set_macro("FrameworkDir", net, "installroot")
+        try:
+            if version > 7.0:
+                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
+            else:
+                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
+        except KeyError:
+            raise DistutilsPlatformError, \
+                  ("""Python was built with Visual Studio 2003;
+extensions must be built with a compiler than can generate compatible binaries.
+Visual Studio 2003 was not found on this system. If you have Cygwin installed,
+you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")
+
+        # $(FrameworkVersion) comes from the first enumerable subkey of
+        # the ".NET Framework Setup\Product" key.
+        p = r"Software\Microsoft\NET Framework Setup\Product"
+        for base in HKEYS:
+            try:
+                h = RegOpenKeyEx(base, p)
+            except RegError:
+                continue
+            key = RegEnumKey(h, 0)
+            d = read_values(base, r"%s\%s" % (p, key))
+            self.macros["$(FrameworkVersion)"] = d["version"]
+
+    def sub(self, s):
+        # Replace every known "$(Name)" macro occurring in 's'.
+        for k, v in self.macros.items():
+            s = string.replace(s, k, v)
+        return s
+
+def get_build_version():
+    """Return the version of MSVC that was used to build Python.
+
+    For Python 2.3 and up, the version number is included in
+    sys.version.  For earlier versions, assume the compiler is MSVC 6.
+    Returns a number like 6, 7.1, 8.0 ... or None when the "MSC v."
+    marker is present but decodes to a pre-6 compiler.
+    """
+
+    prefix = "MSC v."
+    i = string.find(sys.version, prefix)
+    if i == -1:
+        return 6
+    i = i + len(prefix)
+    s, rest = sys.version[i:].split(" ", 1)
+    # "MSC v.1310" -> major 13-6=7, minor 1/10 -> 7.1, etc.
+    majorVersion = int(s[:-2]) - 6
+    minorVersion = int(s[2:3]) / 10.0
+    # I don't think paths are affected by minor version in version 6
+    if majorVersion == 6:
+        minorVersion = 0
+    if majorVersion >= 6:
+        return majorVersion + minorVersion
+    # else we don't know what version of the compiler this is
+    return None
+
+def get_build_architecture():
+    """Return the processor architecture.
+
+    Possible results are "Intel", "Itanium", or "AMD64".
+    """
+
+    # Parse the "NN bit (Arch)" fragment of sys.version; absence of the
+    # marker means a 32-bit x86 build.
+    prefix = " bit ("
+    i = string.find(sys.version, prefix)
+    if i == -1:
+        return "Intel"
+    j = string.find(sys.version, ")", i)
+    return sys.version[i+len(prefix):j]
+
+def normalize_and_reduce_paths(paths):
+    """Return a list of normalized paths with duplicates removed.
+
+    The current order of paths is maintained.
+    """
+    # Paths are normalized so things like:  /a and /a/ aren't both preserved.
+    reduced_paths = []
+    for p in paths:
+        np = os.path.normpath(p)
+        # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
+        if np not in reduced_paths:
+            reduced_paths.append(np)
+    return reduced_paths
+
+
+class MSVCCompiler (CCompiler) :
+    """Concrete class that implements an interface to Microsoft Visual C++,
+       as defined by the CCompiler abstract class."""
+
+    compiler_type = 'msvc'
+
+    # Just set this so CCompiler's constructor doesn't barf.  We currently
+    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+    # as it really isn't necessary for this sort of single-compiler class.
+    # Would be nice to have a consistent interface with UnixCCompiler,
+    # though, so it's worth thinking about.
+    executables = {}
+
+    # Private class data (need to distinguish C from C++ source for compiler)
+    _c_extensions = ['.c']
+    _cpp_extensions = ['.cc', '.cpp', '.cxx']
+    _rc_extensions = ['.rc']
+    _mc_extensions = ['.mc']
+
+    # Needed for the filename generation methods provided by the
+    # base class, CCompiler.
+    src_extensions = (_c_extensions + _cpp_extensions +
+                      _rc_extensions + _mc_extensions)
+    res_extension = '.res'
+    obj_extension = '.obj'
+    static_lib_extension = '.lib'
+    shared_lib_extension = '.dll'
+    static_lib_format = shared_lib_format = '%s%s'
+    exe_extension = '.exe'
+
+    def __init__ (self, verbose=0, dry_run=0, force=0):
+        # Record which MSVC version/architecture built this Python; the
+        # actual toolchain discovery is deferred to initialize().
+        CCompiler.__init__ (self, verbose, dry_run, force)
+        self.__version = get_build_version()
+        self.__arch = get_build_architecture()
+        if self.__arch == "Intel":
+            # x86
+            if self.__version >= 7:
+                self.__root = r"Software\Microsoft\VisualStudio"
+                self.__macros = MacroExpander(self.__version)
+            else:
+                self.__root = r"Software\Microsoft\Devstudio"
+            self.__product = "Visual Studio version %s" % self.__version
+        else:
+            # Win64. Assume this was built with the platform SDK
+            self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)
+
+        self.initialized = False
+
+    def initialize(self):
+        # Locate the compiler/linker executables and build the option
+        # lists.  Called lazily by the worker methods on first use.
+        self.__paths = []
+        if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
+            # Assume that the SDK set up everything alright; don't try to be
+            # smarter
+            self.cc = "cl.exe"
+            self.linker = "link.exe"
+            self.lib = "lib.exe"
+            self.rc = "rc.exe"
+            self.mc = "mc.exe"
+        else:
+            self.__paths = self.get_msvc_paths("path")
+
+            if len (self.__paths) == 0:
+                raise DistutilsPlatformError, \
+                      ("Python was built with %s, "
+                       "and extensions need to be built with the same "
+                       "version of the compiler, but it isn't installed." % self.__product)
+
+            self.cc = self.find_exe("cl.exe")
+            self.linker = self.find_exe("link.exe")
+            self.lib = self.find_exe("lib.exe")
+            self.rc = self.find_exe("rc.exe")   # resource compiler
+            self.mc = self.find_exe("mc.exe")   # message compiler
+            self.set_path_env_var('lib')
+            self.set_path_env_var('include')
+
+        # extend the MSVC path with the current path
+        try:
+            for p in string.split(os.environ['path'], ';'):
+                self.__paths.append(p)
+        except KeyError:
+            pass
+        self.__paths = normalize_and_reduce_paths(self.__paths)
+        os.environ['path'] = string.join(self.__paths, ';')
+
+        self.preprocess_options = None
+        # Compile/link option lists differ between x86 and Win64 builds.
+        if self.__arch == "Intel":
+            self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GX' ,
+                                     '/DNDEBUG']
+            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
+                                          '/Z7', '/D_DEBUG']
+        else:
+            # Win64
+            self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' ,
+                                     '/DNDEBUG']
+            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
+                                          '/Z7', '/D_DEBUG']
+
+        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
+        if self.__version >= 7:
+            self.ldflags_shared_debug = [
+                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
+                ]
+        else:
+            self.ldflags_shared_debug = [
+                '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG'
+                ]
+        self.ldflags_static = [ '/nologo']
+
+        self.initialized = True
+
+    # -- Worker methods ------------------------------------------------
+
+    def object_filenames (self,
+                          source_filenames,
+                          strip_dir=0,
+                          output_dir=''):
+        # Copied from ccompiler.py, extended to return .res as 'object'-file
+        # for .rc input file
+        if output_dir is None: output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            (base, ext) = os.path.splitext (src_name)
+            base = os.path.splitdrive(base)[1] # Chop off the drive
+            base = base[os.path.isabs(base):]  # If abs, chop off leading /
+            if ext not in self.src_extensions:
+                # Better to raise an exception instead of silently continuing
+                # and later complain about sources and targets having
+                # different lengths
+                raise CompileError ("Don't know how to compile %s" % src_name)
+            if strip_dir:
+                base = os.path.basename (base)
+            if ext in self._rc_extensions:
+                obj_names.append (os.path.join (output_dir,
+                                                base + self.res_extension))
+            elif ext in self._mc_extensions:
+                # .mc files also end up as binary .res resources.
+                obj_names.append (os.path.join (output_dir,
+                                                base + self.res_extension))
+            else:
+                obj_names.append (os.path.join (output_dir,
+                                                base + self.obj_extension))
+        return obj_names
+
+    # object_filenames ()
+
+
+    def compile(self, sources,
+                output_dir=None, macros=None, include_dirs=None, debug=0,
+                extra_preargs=None, extra_postargs=None, depends=None):
+        # Compile each source to an object/resource file.  .rc and .mc
+        # inputs are dispatched to rc.exe/mc.exe; everything else goes
+        # through cl.exe.  Returns the full list of expected objects.
+        if not self.initialized: self.initialize()
+        macros, objects, extra_postargs, pp_opts, build = \
+                self._setup_compile(output_dir, macros, include_dirs, sources,
+                                    depends, extra_postargs)
+
+        compile_opts = extra_preargs or []
+        compile_opts.append ('/c')
+        if debug:
+            compile_opts.extend(self.compile_options_debug)
+        else:
+            compile_opts.extend(self.compile_options)
+
+        for obj in objects:
+            try:
+                src, ext = build[obj]
+            except KeyError:
+                # Object is already up-to-date (not scheduled for rebuild).
+                continue
+            if debug:
+                # pass the full pathname to MSVC in debug mode,
+                # this allows the debugger to find the source file
+                # without asking the user to browse for it
+                src = os.path.abspath(src)
+
+            if ext in self._c_extensions:
+                input_opt = "/Tc" + src
+            elif ext in self._cpp_extensions:
+                input_opt = "/Tp" + src
+            elif ext in self._rc_extensions:
+                # compile .RC to .RES file
+                input_opt = src
+                output_opt = "/fo" + obj
+                try:
+                    self.spawn ([self.rc] + pp_opts +
+                                [output_opt] + [input_opt])
+                except DistutilsExecError, msg:
+                    raise CompileError, msg
+                continue
+            elif ext in self._mc_extensions:
+
+                # Compile .MC to .RC file to .RES file.
+                #   * '-h dir' specifies the directory for the
+                #     generated include file
+                #   * '-r dir' specifies the target directory of the
+                #     generated RC file and the binary message resource
+                #     it includes
+                #
+                # For now (since there are no options to change this),
+                # we use the source-directory for the include file and
+                # the build directory for the RC file and message
+                # resources. This works at least for win32all.
+
+                h_dir = os.path.dirname (src)
+                rc_dir = os.path.dirname (obj)
+                try:
+                    # first compile .MC to .RC and .H file
+                    self.spawn ([self.mc] +
+                                ['-h', h_dir, '-r', rc_dir] + [src])
+                    base, _ = os.path.splitext (os.path.basename (src))
+                    rc_file = os.path.join (rc_dir, base + '.rc')
+                    # then compile .RC to .RES file
+                    self.spawn ([self.rc] +
+                                ["/fo" + obj] + [rc_file])
+
+                except DistutilsExecError, msg:
+                    raise CompileError, msg
+                continue
+            else:
+                # how to handle this file?
+                raise CompileError (
+                    "Don't know how to compile %s to %s" % \
+                    (src, obj))
+
+            # C/C++ path: invoke cl.exe once per source file.
+            output_opt = "/Fo" + obj
+            try:
+                self.spawn ([self.cc] + compile_opts + pp_opts +
+                            [input_opt, output_opt] +
+                            extra_postargs)
+            except DistutilsExecError, msg:
+                raise CompileError, msg
+
+        return objects
+
+    # compile ()
+
+
+    def create_static_lib (self,
+                           objects,
+                           output_libname,
+                           output_dir=None,
+                           debug=0,
+                           target_lang=None):
+        # Archive 'objects' into a static .lib with lib.exe; skipped when
+        # the output is already newer than every input.
+        if not self.initialized: self.initialize()
+        (objects, output_dir) = self._fix_object_args (objects, output_dir)
+        output_filename = \
+            self.library_filename (output_libname, output_dir=output_dir)
+
+        if self._need_link (objects, output_filename):
+            lib_args = objects + ['/OUT:' + output_filename]
+            if debug:
+                pass                    # XXX what goes here?
+            try:
+                self.spawn ([self.lib] + lib_args)
+            except DistutilsExecError, msg:
+                raise LibError, msg
+
+        else:
+            log.debug("skipping %s (up-to-date)", output_filename)
+
+    # create_static_lib ()
+
+    def link (self,
+              target_desc,
+              objects,
+              output_filename,
+              output_dir=None,
+              libraries=None,
+              library_dirs=None,
+              runtime_library_dirs=None,
+              export_symbols=None,
+              debug=0,
+              extra_preargs=None,
+              extra_postargs=None,
+              build_temp=None,
+              target_lang=None):
+        # Link 'objects' into an executable or DLL with link.exe.
+        # 'target_desc' is one of the CCompiler.* target constants.
+        if not self.initialized: self.initialize()
+        (objects, output_dir) = self._fix_object_args (objects, output_dir)
+        (libraries, library_dirs, runtime_library_dirs) = \
+            self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)
+
+        # MSVC has no rpath equivalent; warn instead of failing.
+        if runtime_library_dirs:
+            self.warn ("I don't know what to do with 'runtime_library_dirs': "
+                       + str (runtime_library_dirs))
+
+        lib_opts = gen_lib_options (self,
+                                    library_dirs, runtime_library_dirs,
+                                    libraries)
+        if output_dir is not None:
+            output_filename = os.path.join (output_dir, output_filename)
+
+        if self._need_link (objects, output_filename):
+
+            # Executables drop the leading '/DLL' flag from the shared
+            # flag lists; everything else links as a DLL.
+            if target_desc == CCompiler.EXECUTABLE:
+                if debug:
+                    ldflags = self.ldflags_shared_debug[1:]
+                else:
+                    ldflags = self.ldflags_shared[1:]
+            else:
+                if debug:
+                    ldflags = self.ldflags_shared_debug
+                else:
+                    ldflags = self.ldflags_shared
+
+            export_opts = []
+            for sym in (export_symbols or []):
+                export_opts.append("/EXPORT:" + sym)
+
+            ld_args = (ldflags + lib_opts + export_opts +
+                       objects + ['/OUT:' + output_filename])
+
+            # The MSVC linker generates .lib and .exp files, which cannot be
+            # suppressed by any linker switches. The .lib files may even be
+            # needed! Make sure they are generated in the temporary build
+            # directory. Since they have different names for debug and release
+            # builds, they can go into the same directory.
+            if export_symbols is not None:
+                (dll_name, dll_ext) = os.path.splitext(
+                    os.path.basename(output_filename))
+                implib_file = os.path.join(
+                    os.path.dirname(objects[0]),
+                    self.library_filename(dll_name))
+                ld_args.append ('/IMPLIB:' + implib_file)
+
+            if extra_preargs:
+                ld_args[:0] = extra_preargs
+            if extra_postargs:
+                ld_args.extend(extra_postargs)
+
+            self.mkpath (os.path.dirname (output_filename))
+            try:
+                self.spawn ([self.linker] + ld_args)
+            except DistutilsExecError, msg:
+                raise LinkError, msg
+
+        else:
+            log.debug("skipping %s (up-to-date)", output_filename)
+
+    # link ()
+
+
+    # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options() function, in
+    # ccompiler.py.
+
+    def library_dir_option (self, dir):
+        # Linker flag that adds 'dir' to the library search path.
+        return "/LIBPATH:" + dir
+
+    def runtime_library_dir_option (self, dir):
+        # MSVC has no runtime search path (rpath) concept; always raises.
+        raise DistutilsPlatformError, \
+              "don't know how to set runtime library search path for MSVC++"
+
+    def library_option (self, lib):
+        # On MSVC a library is passed to the linker as its filename.
+        return self.library_filename (lib)
+
+
+    def find_library_file (self, dirs, lib, debug=0):
+        # Search 'dirs' for 'lib'; return the full path of the first match
+        # or None.  With debug=1, '<lib>_d' is preferred per directory.
+        # Prefer a debugging library if found (and requested), but deal
+        # with it if we don't have one.
+        if debug:
+            try_names = [lib + "_d", lib]
+        else:
+            try_names = [lib]
+        for dir in dirs:
+            for name in try_names:
+                libfile = os.path.join(dir, self.library_filename (name))
+                if os.path.exists(libfile):
+                    return libfile
+        else:
+            # NOTE(review): for/else — the 'else' runs whenever the loop
+            # finishes without 'break'; it behaves like a plain
+            # 'return None' here.
+            # Oops, didn't find it in *any* of 'dirs'
+            return None
+
+    # find_library_file ()
+
+    # Helper methods for using the MSVC registry settings
+
+    def find_exe(self, exe):
+        """Return path to an MSVC executable program.
+
+        Tries to find the program in several places: first, one of the
+        MSVC program search paths from the registry; next, the directories
+        in the PATH environment variable.  If any of those work, return an
+        absolute path that is known to exist.  If none of them work, just
+        return the original program name, 'exe'.
+        """
+
+        for p in self.__paths:
+            fn = os.path.join(os.path.abspath(p), exe)
+            if os.path.isfile(fn):
+                return fn
+
+        # didn't find it; try existing path
+        for p in string.split(os.environ['Path'],';'):
+            fn = os.path.join(os.path.abspath(p),exe)
+            if os.path.isfile(fn):
+                return fn
+
+        return exe
+
+    def get_msvc_paths(self, path, platform='x86'):
+        """Get a list of devstudio directories (include, lib or path).
+
+        Return a list of strings.  The list will be empty if unable to
+        access the registry or appropriate registry keys not found.
+        """
+
+        if not _can_read_reg:
+            return []
+
+        # Registry value names are "path dirs", "include dirs", etc.
+        path = path + " dirs"
+        if self.__version >= 7:
+            key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
+                   % (self.__root, self.__version))
+        else:
+            key = (r"%s\6.0\Build System\Components\Platforms"
+                   r"\Win32 (%s)\Directories" % (self.__root, platform))
+
+        for base in HKEYS:
+            d = read_values(base, key)
+            if d:
+                if self.__version >= 7:
+                    # VS7+ values contain $(...) macros that must be expanded.
+                    return string.split(self.__macros.sub(d[path]), ";")
+                else:
+                    return string.split(d[path], ";")
+        # MSVC 6 seems to create the registry entries we need only when
+        # the GUI is run.
+        if self.__version == 6:
+            for base in HKEYS:
+                if read_values(base, r"%s\6.0" % self.__root) is not None:
+                    self.warn("It seems you have Visual Studio 6 installed, "
+                        "but the expected registry settings are not present.\n"
+                        "You must at least run the Visual Studio GUI once "
+                        "so that these entries are created.")
+                    break
+        return []
+
+    def set_path_env_var(self, name):
+        """Set environment variable 'name' to an MSVC path type value.
+
+        This is equivalent to a SET command prior to execution of spawned
+        commands.
+        """
+
+        # 'lib' maps to the registry's "library dirs" value.
+        if name == "lib":
+            p = self.get_msvc_paths("library")
+        else:
+            p = self.get_msvc_paths(name)
+        if p:
+            os.environ[name] = string.join(p, ';')
+
+
+# For MSVC 8.0 (VS 2005) and newer, replace this module's MSVCCompiler and
+# MacroExpander with the msvc9compiler implementations at import time; the
+# legacy class stays reachable as OldMSVCCompiler.
+if get_build_version() >= 8.0:
+    log.debug("Importing new compiler from distutils.msvc9compiler")
+    OldMSVCCompiler = MSVCCompiler
+    from distutils.msvc9compiler import MSVCCompiler
+    # get_build_architecture not really relevant now we support cross-compile
+    from distutils.msvc9compiler import MacroExpander
diff --git a/lib/distutils/distutils/spawn.py b/lib/distutils/distutils/spawn.py
new file mode 100644
index 0000000..5c014c4
--- /dev/null
+++ b/lib/distutils/distutils/spawn.py
@@ -0,0 +1,173 @@
+"""distutils.spawn
+
+Provides the 'spawn()' function, a front-end to various platform-
+specific functions for launching another program in a sub-process.
+Also provides the 'find_executable()' to search the path for a given
+executable name.
+"""
+
+__revision__ = "$Id$"
+
+import sys
+import os
+
+from distutils.errors import DistutilsPlatformError, DistutilsExecError
+from distutils import log
+
def spawn(cmd, search_path=1, verbose=0, dry_run=0):
    """Run another program, specified as a command list 'cmd', in a new process.

    'cmd' is just the argument list for the new process, ie.
    cmd[0] is the program to run and cmd[1:] are the rest of its arguments.
    There is no way to run a program with a name different from that of its
    executable.

    If 'search_path' is true (the default), the system's executable
    search path will be used to find the program; otherwise, cmd[0]
    must be the exact path to the executable.  If 'dry_run' is true,
    the command will not actually be run.

    Raise DistutilsExecError if running the program fails in any way; just
    return on success.
    """
    # 'verbose' is accepted for backward compatibility but is not used here.
    # Dispatch to the platform-specific implementation.
    if os.name == 'posix':
        _spawn_posix(cmd, search_path, dry_run=dry_run)
    elif os.name == 'nt':
        _spawn_nt(cmd, search_path, dry_run=dry_run)
    elif os.name == 'os2':
        _spawn_os2(cmd, search_path, dry_run=dry_run)
    else:
        raise DistutilsPlatformError, \
              "don't know how to spawn programs on platform '%s'" % os.name
+
+def _nt_quote_args(args):
+    """Quote command-line arguments for DOS/Windows conventions.
+
+    Just wraps every argument which contains blanks in double quotes, and
+    returns a new argument list.
+    """
+    # XXX this doesn't seem very robust to me -- but if the Windows guys
+    # say it'll work, I guess I'll have to accept it.  (What if an arg
+    # contains quotes?  What other magic characters, other than spaces,
+    # have to be escaped?  Is there an escaping mechanism other than
+    # quoting?)
+    for i, arg in enumerate(args):
+        if ' ' in arg:
+            args[i] = '"%s"' % arg
+    return args
+
def _spawn_nt(cmd, search_path=1, verbose=0, dry_run=0):
    """Run 'cmd' on Windows; see spawn() for the argument semantics.

    'verbose' is accepted for backward compatibility but unused.
    """
    executable = cmd[0]
    cmd = _nt_quote_args(cmd)
    if search_path:
        # either we find one or it stays the same
        executable = find_executable(executable) or executable
    log.info(' '.join([executable] + cmd[1:]))
    if not dry_run:
        # spawn for NT requires a full path to the .exe
        try:
            rc = os.spawnv(os.P_WAIT, executable, cmd)
        except OSError, exc:
            # this seems to happen when the command isn't found
            # (exc[-1] is the strerror text of a Python 2 OSError)
            raise DistutilsExecError, \
                  "command '%s' failed: %s" % (cmd[0], exc[-1])
        if rc != 0:
            # and this reflects the command running but failing
            raise DistutilsExecError, \
                  "command '%s' failed with exit status %d" % (cmd[0], rc)
+
def _spawn_os2(cmd, search_path=1, verbose=0, dry_run=0):
    """Run 'cmd' on OS/2; see spawn() for the argument semantics.

    Unlike _spawn_nt(), the arguments are passed without quoting.
    'verbose' is accepted for backward compatibility but unused.
    """
    executable = cmd[0]
    if search_path:
        # either we find one or it stays the same
        executable = find_executable(executable) or executable
    log.info(' '.join([executable] + cmd[1:]))
    if not dry_run:
        # spawnv for OS/2 EMX requires a full path to the .exe
        try:
            rc = os.spawnv(os.P_WAIT, executable, cmd)
        except OSError, exc:
            # this seems to happen when the command isn't found
            # (exc[-1] is the strerror text of a Python 2 OSError)
            raise DistutilsExecError, \
                  "command '%s' failed: %s" % (cmd[0], exc[-1])
        if rc != 0:
            # and this reflects the command running but failing
            log.debug("command '%s' failed with exit status %d" % (cmd[0], rc))
            raise DistutilsExecError, \
                  "command '%s' failed with exit status %d" % (cmd[0], rc)
+
+
def _spawn_posix(cmd, search_path=1, verbose=0, dry_run=0):
    """Run 'cmd' on POSIX via fork/exec; see spawn() for the semantics.

    'verbose' is accepted for backward compatibility but unused.
    """
    log.info(' '.join(cmd))
    if dry_run:
        return
    # execvp searches the PATH for cmd[0]; execv requires an exact path.
    exec_fn = search_path and os.execvp or os.execv
    pid = os.fork()

    if pid == 0:  # in the child
        try:
            exec_fn(cmd[0], cmd)
        except OSError, e:
            sys.stderr.write("unable to execute %s: %s\n" %
                             (cmd[0], e.strerror))
            # _exit() skips normal interpreter cleanup, which belongs to
            # the parent process, not the forked child.
            os._exit(1)

        sys.stderr.write("unable to execute %s for unknown reasons" % cmd[0])
        os._exit(1)
    else:   # in the parent
        # Loop until the child either exits or is terminated by a signal
        # (ie. keep waiting if it's merely stopped)
        while 1:
            try:
                pid, status = os.waitpid(pid, 0)
            except OSError, exc:
                import errno
                # waitpid() can be interrupted by an unrelated signal;
                # simply retry in that case.
                if exc.errno == errno.EINTR:
                    continue
                raise DistutilsExecError, \
                      "command '%s' failed: %s" % (cmd[0], exc[-1])
            if os.WIFSIGNALED(status):
                raise DistutilsExecError, \
                      "command '%s' terminated by signal %d" % \
                      (cmd[0], os.WTERMSIG(status))

            elif os.WIFEXITED(status):
                exit_status = os.WEXITSTATUS(status)
                if exit_status == 0:
                    return   # hey, it succeeded!
                else:
                    raise DistutilsExecError, \
                          "command '%s' failed with exit status %d" % \
                          (cmd[0], exit_status)

            elif os.WIFSTOPPED(status):
                continue

            else:
                raise DistutilsExecError, \
                      "unknown error executing '%s': termination status %d" % \
                      (cmd[0], status)
+
def find_executable(executable, path=None):
    """Tries to find 'executable' in the directories listed in 'path'.

    A string listing directories separated by 'os.pathsep'; defaults to
    os.environ['PATH'].  Returns the complete filename or None if not found.
    """
    if path is None:
        path = os.environ['PATH']

    # Windows and OS/2 executables carry a '.exe' suffix; append it when
    # the caller left it off.
    _, extension = os.path.splitext(executable)
    if (sys.platform == 'win32' or os.name == 'os2') and (extension != '.exe'):
        executable = executable + '.exe'

    # An existing file path wins outright; otherwise probe each search
    # directory in order and return the first hit.
    if os.path.isfile(executable):
        return executable
    for directory in path.split(os.pathsep):
        candidate = os.path.join(directory, executable)
        if os.path.isfile(candidate):
            # the file exists, we have a shot at spawn working
            return candidate
    return None
diff --git a/lib/distutils/distutils/sysconfig.py b/lib/distutils/distutils/sysconfig.py
new file mode 100644
index 0000000..d206e0c
--- /dev/null
+++ b/lib/distutils/distutils/sysconfig.py
@@ -0,0 +1,571 @@
+"""Provide access to Python's configuration information.  The specific
+configuration variables available depend heavily on the platform and
+configuration.  The values may be retrieved using
+get_config_var(name), and the list of variables is available via
+get_config_vars().keys().  Additional convenience functions are also
+available.
+
+Written by:   Fred L. Drake, Jr.
+Email:        <fdrake@acm.org>
+"""
+
+__revision__ = "$Id$"
+
+import os
+import re
+import string
+import sys
+
+from distutils.errors import DistutilsPlatformError
+
+# These are needed in a couple of spots, so just compute them once.
+PREFIX = os.path.normpath(sys.prefix)
+EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
+
+# Path to the base directory of the project. On Windows the binary may
+# live in project/PCBuild9.  If we're dealing with an x64 Windows build,
+# it'll live in project/PCbuild/amd64.
+project_base = os.path.dirname(os.path.abspath(sys.executable))
+if os.name == "nt" and "pcbuild" in project_base[-8:].lower():
+    project_base = os.path.abspath(os.path.join(project_base, os.path.pardir))
+# PC/VS7.1
+if os.name == "nt" and "\\pc\\v" in project_base[-10:].lower():
+    project_base = os.path.abspath(os.path.join(project_base, os.path.pardir,
+                                                os.path.pardir))
+# PC/AMD64
+if os.name == "nt" and "\\pcbuild\\amd64" in project_base[-14:].lower():
+    project_base = os.path.abspath(os.path.join(project_base, os.path.pardir,
+                                                os.path.pardir))
+
+# python_build: (Boolean) if true, we're either building Python or
+# building an extension with an un-installed Python, so we use
+# different (hard-wired) directories.
+# Setup.local is available for Makefile builds including VPATH builds,
+# Setup.dist is available on Windows
+def _python_build():
+    for fn in ("Setup.dist", "Setup.local"):
+        if os.path.isfile(os.path.join(project_base, "Modules", fn)):
+            return True
+    return False
+python_build = _python_build()
+
+
def get_python_version():
    """Return a string containing the major and minor Python version,
    leaving off the patchlevel.  Sample return values could be '1.5'
    or '2.2'.
    """
    # Slicing sys.version to three characters truncates two-digit minor
    # versions (e.g. '3.10' would become '3.1').  Build the string from
    # sys.version_info instead; the result is identical for single-digit
    # minors and correct for everything else.
    return '%d.%d' % sys.version_info[:2]
+
+
def get_python_inc(plat_specific=0, prefix=None):
    """Return the directory containing installed Python header files.

    If 'plat_specific' is false (the default), this is the path to the
    non-platform-specific header files, i.e. Python.h and so on;
    otherwise, this is the path to platform-specific header files
    (namely pyconfig.h).

    If 'prefix' is supplied, use it instead of sys.prefix or
    sys.exec_prefix -- i.e., ignore 'plat_specific'.
    """
    if prefix is None:
        prefix = EXEC_PREFIX if plat_specific else PREFIX

    if os.name == "posix":
        if python_build:
            build_dir = os.path.dirname(sys.executable)
            if plat_specific:
                # pyconfig.h lives in the build directory itself
                return build_dir
            # Python.h lives under Include/ in the source tree, which is
            # recorded relative to the build directory.
            source_dir = os.path.abspath(os.path.join(build_dir,
                                         get_config_var('srcdir')))
            return os.path.join(source_dir, "Include")
        return os.path.join(prefix, "include", "python" + get_python_version())
    if os.name == "nt":
        return os.path.join(prefix, "include")
    if os.name == "os2":
        return os.path.join(prefix, "Include")
    raise DistutilsPlatformError(
        "I don't know where Python installs its C header files "
        "on platform '%s'" % os.name)
+
+
def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
    """Return the directory containing the Python library (standard or
    site additions).

    If 'plat_specific' is true, return the directory containing
    platform-specific modules, i.e. any module from a non-pure-Python
    module distribution; otherwise, return the platform-shared library
    directory.  If 'standard_lib' is true, return the directory
    containing standard Python library modules; otherwise, return the
    directory for site-specific modules.

    If 'prefix' is supplied, use it instead of sys.prefix or
    sys.exec_prefix -- i.e., ignore 'plat_specific'.
    """
    if prefix is None:
        prefix = EXEC_PREFIX if plat_specific else PREFIX

    if os.name == "posix":
        libpython = os.path.join(prefix, "lib",
                                 "python" + get_python_version())
        if standard_lib:
            return libpython
        return os.path.join(libpython, "site-packages")

    if os.name in ("nt", "os2"):
        if standard_lib:
            return os.path.join(prefix, "Lib")
        # Before Python 2.2 the Windows site directory was the prefix
        # itself; OS/2 always uses Lib/site-packages.
        if os.name == "nt" and get_python_version() < "2.2":
            return prefix
        return os.path.join(prefix, "Lib", "site-packages")

    raise DistutilsPlatformError(
        "I don't know where Python installs its library "
        "on platform '%s'" % os.name)
+
+
def customize_compiler(compiler):
    """Do any platform-specific customization of a CCompiler instance.

    Mainly needed on Unix, so we can plug in the information that
    varies across Unices and is stored in Python's Makefile.
    """
    if compiler.compiler_type == "unix":
        (cc, cxx, opt, cflags, ccshared, ldshared, so_ext) = \
            get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
                            'CCSHARED', 'LDSHARED', 'SO')

        # Environment variables override the Makefile values.  The order
        # below matters: CPP defaults to the (possibly overridden) CC,
        # and CFLAGS/CPPFLAGS are folded into several derived commands.
        if 'CC' in os.environ:
            cc = os.environ['CC']
        if 'CXX' in os.environ:
            cxx = os.environ['CXX']
        if 'LDSHARED' in os.environ:
            ldshared = os.environ['LDSHARED']
        if 'CPP' in os.environ:
            cpp = os.environ['CPP']
        else:
            cpp = cc + " -E"           # not always
        if 'LDFLAGS' in os.environ:
            ldshared = ldshared + ' ' + os.environ['LDFLAGS']
        if 'CFLAGS' in os.environ:
            # Note: this replaces the Makefile CFLAGS with OPT + $CFLAGS
            # rather than appending to them.
            cflags = opt + ' ' + os.environ['CFLAGS']
            ldshared = ldshared + ' ' + os.environ['CFLAGS']
        if 'CPPFLAGS' in os.environ:
            cpp = cpp + ' ' + os.environ['CPPFLAGS']
            cflags = cflags + ' ' + os.environ['CPPFLAGS']
            ldshared = ldshared + ' ' + os.environ['CPPFLAGS']

        cc_cmd = cc + ' ' + cflags
        compiler.set_executables(
            preprocessor=cpp,
            compiler=cc_cmd,
            compiler_so=cc_cmd + ' ' + ccshared,
            compiler_cxx=cxx,
            linker_so=ldshared,
            linker_exe=cc)

        compiler.shared_lib_extension = so_ext
+
+
def get_config_h_filename():
    """Return full pathname of installed pyconfig.h file."""
    if not python_build:
        inc_dir = get_python_inc(plat_specific=1)
    elif os.name == "nt":
        # Source-tree layout keeps the Windows config header under PC/.
        inc_dir = os.path.join(project_base, "PC")
    else:
        inc_dir = project_base
    # The name of the config.h file changed in 2.2
    header = 'pyconfig.h' if get_python_version() >= '2.2' else 'config.h'
    return os.path.join(inc_dir, header)
+
+
def get_makefile_filename():
    """Return full pathname of installed Makefile from the Python build."""
    if python_build:
        # In a source tree the Makefile sits next to the interpreter.
        return os.path.join(os.path.dirname(sys.executable), "Makefile")
    config_dir = os.path.join(
        get_python_lib(plat_specific=1, standard_lib=1), "config")
    return os.path.join(config_dir, "Makefile")
+
+
def parse_config_h(fp, g=None):
    """Parse a config.h-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    result = {} if g is None else g
    define_pat = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
    undef_pat = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")

    # fp.readline() returns '' at EOF, which ends the iteration.
    for line in iter(fp.readline, ''):
        match = define_pat.match(line)
        if match:
            name, value = match.group(1, 2)
            # Store numeric values as ints, anything else as the raw string.
            try:
                value = int(value)
            except ValueError:
                pass
            result[name] = value
            continue
        match = undef_pat.match(line)
        if match:
            # An explicitly #undef'ed name maps to 0.
            result[match.group(1)] = 0
    return result
+
+
# Regexes needed for parsing Makefile (and similar syntaxes,
# like old-style Setup files).
# _variable_rx matches an assignment line ("NAME = value");
# _findvar1_rx matches a "$(NAME)" reference and _findvar2_rx a
# "${NAME}" one.
_variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
+
def parse_makefile(fn, g=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    from distutils.text_file import TextFile
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)

    if g is None:
        g = {}
    # 'done' holds fully-resolved variables; 'notdone' the ones whose
    # values still contain $(VAR)/${VAR} references.
    done = {}
    notdone = {}

    while 1:
        line = fp.readline()
        if line is None:  # eof
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # `$$' is a literal `$' in make
            tmpv = v.replace('$$', '')

            if "$" in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    # insert literal `$'
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v

    # do variable interpolation here
    # Substitute one reference at a time, retrying variables whose
    # dependencies are not yet resolved on a later pass.
    # NOTE: iterating notdone.keys() while deleting entries relies on
    # Python 2 returning a list copy of the keys.
    while notdone:
        for name in notdone.keys():
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]
                else:
                    # Unknown variable: expand to the empty string.
                    done[n] = item = ""
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try: value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        del notdone[name]
            else:
                # bogus variable reference; just drop it since we can't deal
                del notdone[name]

    fp.close()

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    g.update(done)
    return g
+
+
def expand_makefile_vars(s, vars):
    """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
    'string' according to 'vars' (a dictionary mapping variable names to
    values).  Variables not present in 'vars' are silently expanded to the
    empty string.  The variable values in 'vars' should not contain further
    variable expansions; if 'vars' is the output of 'parse_makefile()',
    you're fine.  Returns a variable-expanded version of 's'.
    """

    # This algorithm does multiple expansion, so if vars['foo'] contains
    # "${bar}", it will expand ${foo} to ${bar}, and then expand
    # ${bar}... and so forth.  This is fine as long as 'vars' comes from
    # 'parse_makefile()', which takes care of such expansions eagerly,
    # according to make's variable expansion semantics.

    while 1:
        m = _findvar1_rx.search(s) or _findvar2_rx.search(s)
        if m:
            (beg, end) = m.span()
            # Supply '' as the default: a bare vars.get(name) returns None
            # for an unknown variable, and concatenating None raised
            # TypeError instead of expanding to the empty string as the
            # docstring promises.
            s = s[0:beg] + vars.get(m.group(1), '') + s[end:]
        else:
            break
    return s
+
+
# Cache for get_config_vars(): None until the first lookup fills it in.
_config_vars = None

def _init_posix():
    """Initialize the module as appropriate for POSIX systems."""
    g = {}
    # load the installed Makefile:
    try:
        filename = get_makefile_filename()
        parse_makefile(filename, g)
    except IOError, msg:
        my_msg = "invalid Python installation: unable to open %s" % filename
        if hasattr(msg, "strerror"):
            my_msg = my_msg + " (%s)" % msg.strerror

        raise DistutilsPlatformError(my_msg)

    # load the installed pyconfig.h:
    # NOTE(review): the file object handed to parse_config_h() is never
    # explicitly closed; it is reclaimed by garbage collection.
    try:
        filename = get_config_h_filename()
        parse_config_h(file(filename), g)
    except IOError, msg:
        my_msg = "invalid Python installation: unable to open %s" % filename
        if hasattr(msg, "strerror"):
            my_msg = my_msg + " (%s)" % msg.strerror

        raise DistutilsPlatformError(my_msg)

    # On MacOSX we need to check the setting of the environment variable
    # MACOSX_DEPLOYMENT_TARGET: configure bases some choices on it so
    # it needs to be compatible.
    # If it isn't set we set it to the configure-time value
    if sys.platform == 'darwin' and 'MACOSX_DEPLOYMENT_TARGET' in g:
        cfg_target = g['MACOSX_DEPLOYMENT_TARGET']
        cur_target = os.getenv('MACOSX_DEPLOYMENT_TARGET', '')
        if cur_target == '':
            cur_target = cfg_target
            os.environ['MACOSX_DEPLOYMENT_TARGET'] = cfg_target
        elif map(int, cfg_target.split('.')) > map(int, cur_target.split('.')):
            # The environment target must not be older than the
            # configure-time one; compare component-wise so e.g.
            # 10.10 sorts after 10.9.
            my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: now "%s" but "%s" during configure'
                % (cur_target, cfg_target))
            raise DistutilsPlatformError(my_msg)

    # On AIX, there are wrong paths to the linker scripts in the Makefile
    # -- these paths are relative to the Python source, but when installed
    # the scripts are in another directory.
    if python_build:
        g['LDSHARED'] = g['BLDSHARED']

    elif get_python_version() < '2.1':
        # The following two branches are for 1.5.2 compatibility.
        if sys.platform == 'aix4':          # what about AIX 3.x ?
            # Linker script is in the config directory, not in Modules as the
            # Makefile says.
            python_lib = get_python_lib(standard_lib=1)
            ld_so_aix = os.path.join(python_lib, 'config', 'ld_so_aix')
            python_exp = os.path.join(python_lib, 'config', 'python.exp')

            g['LDSHARED'] = "%s %s -bI:%s" % (ld_so_aix, g['CC'], python_exp)

        elif sys.platform == 'beos':
            # Linker script is in the config directory.  In the Makefile it is
            # relative to the srcdir, which after installation no longer makes
            # sense.
            python_lib = get_python_lib(standard_lib=1)
            linkerscript_path = string.split(g['LDSHARED'])[0]
            linkerscript_name = os.path.basename(linkerscript_path)
            linkerscript = os.path.join(python_lib, 'config',
                                        linkerscript_name)

            # XXX this isn't the right place to do this: adding the Python
            # library to the link, if needed, should be in the "build_ext"
            # command.  (It's also needed for non-MS compilers on Windows, and
            # it's taken care of for them by the 'build_ext.get_libraries()'
            # method.)
            g['LDSHARED'] = ("%s -L%s/lib -lpython%s" %
                             (linkerscript, PREFIX, get_python_version()))

    global _config_vars
    _config_vars = g
+
+
def _init_nt():
    """Initialize the module as appropriate for NT"""
    g = {
        # set basic install directories
        'LIBDEST': get_python_lib(plat_specific=0, standard_lib=1),
        'BINLIBDEST': get_python_lib(plat_specific=1, standard_lib=1),
        # XXX hmmm.. a normal install puts include files here
        'INCLUDEPY': get_python_inc(plat_specific=0),
        'SO': '.pyd',
        'EXE': ".exe",
        'VERSION': get_python_version().replace(".", ""),
        'BINDIR': os.path.dirname(os.path.abspath(sys.executable)),
    }

    global _config_vars
    _config_vars = g
+
+
def _init_os2():
    """Initialize the module as appropriate for OS/2"""
    global _config_vars
    cfg = {}
    # set basic install directories
    cfg['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
    cfg['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
    # XXX hmmm.. a normal install puts include files here
    cfg['INCLUDEPY'] = get_python_inc(plat_specific=0)
    cfg['SO'] = '.pyd'
    cfg['EXE'] = ".exe"
    _config_vars = cfg
+
+
def get_config_vars(*args):
    """With no arguments, return a dictionary of all configuration
    variables relevant for the current platform.  Generally this includes
    everything needed to build extensions and install both pure modules and
    extensions.  On Unix, this means every variable defined in Python's
    installed Makefile; on Windows and Mac OS it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    """
    global _config_vars
    if _config_vars is None:
        # Lazily initialize through the platform-specific _init_<os.name>()
        # helper, when this module defines one.
        func = globals().get("_init_" + os.name)
        if func:
            func()
        else:
            _config_vars = {}

        # Normalized versions of prefix and exec_prefix are handy to have;
        # in fact, these are the standard versions used most places in the
        # Distutils.
        _config_vars['prefix'] = PREFIX
        _config_vars['exec_prefix'] = EXEC_PREFIX

        if sys.platform == 'darwin':
            kernel_version = os.uname()[2] # Kernel version (8.4.3)
            major_version = int(kernel_version.split('.')[0])

            if major_version < 8:
                # On Mac OS X before 10.4, check if -arch and -isysroot
                # are in CFLAGS or LDFLAGS and remove them if they are.
                # This is needed when building extensions on a 10.3 system
                # using a universal build of python.
                for key in ('LDFLAGS', 'BASECFLAGS', 'LDSHARED',
                        # a number of derived variables. These need to be
                        # patched up as well.
                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
                    flags = _config_vars[key]
                    flags = re.sub('-arch\s+\w+\s', ' ', flags)
                    flags = re.sub('-isysroot [^ \t]*', ' ', flags)
                    _config_vars[key] = flags

            else:

                # Allow the user to override the architecture flags using
                # an environment variable.
                # NOTE: This name was introduced by Apple in OSX 10.5 and
                # is used by several scripting languages distributed with
                # that OS release.

                if 'ARCHFLAGS' in os.environ:
                    arch = os.environ['ARCHFLAGS']
                    for key in ('LDFLAGS', 'BASECFLAGS', 'LDSHARED',
                        # a number of derived variables. These need to be
                        # patched up as well.
                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):

                        # Strip the configured -arch options before adding
                        # the user-supplied ones.
                        flags = _config_vars[key]
                        flags = re.sub('-arch\s+\w+\s', ' ', flags)
                        flags = flags + ' ' + arch
                        _config_vars[key] = flags

                # If we're on OSX 10.5 or later and the user tries to
                # compiles an extension using an SDK that is not present
                # on the current machine it is better to not use an SDK
                # than to fail.
                #
                # The major usecase for this is users using a Python.org
                # binary installer  on OSX 10.6: that installer uses
                # the 10.4u SDK, but that SDK is not installed by default
                # when you install Xcode.
                #
                m = re.search('-isysroot\s+(\S+)', _config_vars['CFLAGS'])
                if m is not None:
                    sdk = m.group(1)
                    if not os.path.exists(sdk):
                        for key in ('LDFLAGS', 'BASECFLAGS', 'LDSHARED',
                             # a number of derived variables. These need to be
                             # patched up as well.
                            'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):

                            flags = _config_vars[key]
                            flags = re.sub('-isysroot\s+\S+(\s|$)', ' ', flags)
                            _config_vars[key] = flags

    if args:
        # Return the requested values in order; missing names map to None.
        vals = []
        for name in args:
            vals.append(_config_vars.get(name))
        return vals
    else:
        return _config_vars
+
def get_config_var(name):
    """Return the value of a single variable using the dictionary
    returned by 'get_config_vars()'.  Equivalent to
    get_config_vars().get(name)
    """
    # Delegate to get_config_vars(), which lazily initializes the cache.
    all_vars = get_config_vars()
    return all_vars.get(name)
diff --git a/lib/distutils/distutils/text_file.py b/lib/distutils/distutils/text_file.py
new file mode 100644
index 0000000..09a798b
--- /dev/null
+++ b/lib/distutils/distutils/text_file.py
@@ -0,0 +1,304 @@
+"""text_file
+
+provides the TextFile class, which gives an interface to text files
+that (optionally) takes care of stripping comments, ignoring blank
+lines, and joining lines with backslashes."""
+
+__revision__ = "$Id$"
+
+import sys
+
+
class TextFile:

    """Provides a file-like object that takes care of all the things you
       commonly want to do when processing a text file that has some
       line-by-line syntax: strip comments (as long as "#" is your
       comment character), skip blank lines, join adjacent lines by
       escaping the newline (ie. backslash at end of line), strip
       leading and/or trailing whitespace.  All of these are optional
       and independently controllable.

       Provides a 'warn()' method so you can generate warning messages that
       report physical line number, even if the logical line in question
       spans multiple physical lines.  Also provides 'unreadline()' for
       implementing line-at-a-time lookahead.

       Constructor is called as:

           TextFile (filename=None, file=None, **options)

       It bombs (RuntimeError) if both 'filename' and 'file' are None;
       'filename' should be a string, and 'file' a file object (or
       something that provides 'readline()' and 'close()' methods).  It is
       recommended that you supply at least 'filename', so that TextFile
       can include it in warning messages.  If 'file' is not supplied,
       TextFile creates its own using the 'open()' builtin.

       The options are all boolean, and affect the value returned by
       'readline()':
         strip_comments [default: true]
           strip from "#" to end-of-line, as well as any whitespace
           leading up to the "#" -- unless it is escaped by a backslash
         lstrip_ws [default: false]
           strip leading whitespace from each line before returning it
         rstrip_ws [default: true]
           strip trailing whitespace (including line terminator!) from
           each line before returning it
         skip_blanks [default: true]
           skip lines that are empty *after* stripping comments and
           whitespace.  (If both lstrip_ws and rstrip_ws are false,
           then some lines may consist of solely whitespace: these will
           *not* be skipped, even if 'skip_blanks' is true.)
         join_lines [default: false]
           if a backslash is the last non-newline character on a line
           after stripping comments and whitespace, join the following line
           to it to form one "logical line"; if N consecutive lines end
           with a backslash, then N+1 physical lines will be joined to
           form one logical line.
         collapse_join [default: false]
           strip leading whitespace from lines that are joined to their
           predecessor; only matters if (join_lines and not lstrip_ws)

       Note that since 'rstrip_ws' can strip the trailing newline, the
       semantics of 'readline()' must differ from those of the builtin file
       object's 'readline()' method!  In particular, 'readline()' returns
       None for end-of-file: an empty string might just be a blank line (or
       an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is
       not."""

    default_options = { 'strip_comments': 1,
                        'skip_blanks':    1,
                        'lstrip_ws':      0,
                        'rstrip_ws':      1,
                        'join_lines':     0,
                        'collapse_join':  0,
                      }

    def __init__ (self, filename=None, file=None, **options):
        """Construct a new TextFile object.  At least one of 'filename'
           (a string) and 'file' (a file-like object) must be supplied.
           The keyword argument options are described above and affect
           the values returned by 'readline()'."""

        if filename is None and file is None:
            # Call form of raise: behaviour-identical to the old two-token
            # form, and valid in every Python version.
            raise RuntimeError(
                  "you must supply either or both of 'filename' and 'file'")

        # set values for all options -- either from client option hash
        # or fallback to default_options
        for opt in self.default_options.keys():
            if opt in options:
                setattr (self, opt, options[opt])

            else:
                setattr (self, opt, self.default_options[opt])

        # sanity check client option hash
        for opt in options.keys():
            if opt not in self.default_options:
                raise KeyError("invalid TextFile option '%s'" % opt)

        if file is None:
            self.open (filename)
        else:
            self.filename = filename
            self.file = file
            self.current_line = 0       # assuming that file is at BOF!

        # 'linebuf' is a stack of lines that will be emptied before we
        # actually read from the file; it's only populated by an
        # 'unreadline()' operation
        self.linebuf = []


    def open (self, filename):
        """Open a new file named 'filename'.  This overrides both the
           'filename' and 'file' arguments to the constructor."""

        self.filename = filename
        self.file = open (self.filename, 'r')
        self.current_line = 0


    def close (self):
        """Close the current file and forget everything we know about it
           (filename, current line number)."""

        self.file.close ()
        self.file = None
        self.filename = None
        self.current_line = None


    def gen_error (self, msg, line=None):
        """Return 'msg' prefixed with the filename and the physical line
           number (or "lines M-N" range) it refers to; 'line' defaults to
           the current line."""
        outmsg = []
        if line is None:
            line = self.current_line
        outmsg.append(self.filename + ", ")
        if isinstance(line, (list, tuple)):
            outmsg.append("lines %d-%d: " % tuple (line))
        else:
            outmsg.append("line %d: " % line)
        outmsg.append(str(msg))
        return ''.join(outmsg)


    def error (self, msg, line=None):
        """Raise ValueError with 'msg' tied to the current (or given)
           physical line(s), formatted as by 'gen_error()'."""
        raise ValueError("error: " + self.gen_error(msg, line))

    def warn (self, msg, line=None):
        """Print (to stderr) a warning message tied to the current logical
           line in the current file.  If the current logical line in the
           file spans multiple physical lines, the warning refers to the
           whole range, eg. "lines 3-5".  If 'line' supplied, it overrides
           the current line number; it may be a list or tuple to indicate a
           range of physical lines, or an integer for a single physical
           line."""
        sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n")


    def readline (self):
        """Read and return a single logical line from the current file (or
           from an internal buffer if lines have previously been "unread"
           with 'unreadline()').  If the 'join_lines' option is true, this
           may involve reading multiple physical lines concatenated into a
           single string.  Updates the current line number, so calling
           'warn()' after 'readline()' emits a warning about the physical
           line(s) just read.  Returns None on end-of-file, since the empty
           string can occur if 'rstrip_ws' is true but 'skip_blanks' is
           not."""

        # If any "unread" lines waiting in 'linebuf', return the top
        # one.  (We don't actually buffer read-ahead data -- lines only
        # get put in 'linebuf' if the client explicitly does an
        # 'unreadline()'.)
        if self.linebuf:
            line = self.linebuf[-1]
            del self.linebuf[-1]
            return line

        buildup_line = ''

        while 1:
            # read the line, make it None if EOF
            line = self.file.readline()
            if line == '': line = None

            if self.strip_comments and line:

                # Look for the first "#" in the line.  If none, never
                # mind.  If we find one and it's the first character, or
                # is not preceded by "\", then it starts a comment --
                # strip the comment, strip whitespace before it, and
                # carry on.  Otherwise, it's just an escaped "#", so
                # unescape it (and any other escaped "#"'s that might be
                # lurking in there) and otherwise leave the line alone.

                pos = line.find("#")
                if pos == -1:           # no "#" -- no comments
                    pass

                # It's definitely a comment -- either "#" is the first
                # character, or it's elsewhere and unescaped.
                elif pos == 0 or line[pos-1] != "\\":
                    # Have to preserve the trailing newline, because it's
                    # the job of a later step (rstrip_ws) to remove it --
                    # and if rstrip_ws is false, we'd better preserve it!
                    # (NB. this means that if the final line is all comment
                    # and has no trailing newline, we will think that it's
                    # EOF; I think that's OK.)
                    eol = (line[-1] == '\n') and '\n' or ''
                    line = line[0:pos] + eol

                    # If all that's left is whitespace, then skip line
                    # *now*, before we try to join it to 'buildup_line' --
                    # that way constructs like
                    #   hello \\
                    #   # comment that should be ignored
                    #   there
                    # result in "hello there".
                    if line.strip() == "":
                        continue

                else:                   # it's an escaped "#"
                    line = line.replace("\\#", "#")


            # did previous line end with a backslash? then accumulate
            if self.join_lines and buildup_line:
                # oops: end of file
                if line is None:
                    self.warn ("continuation line immediately precedes "
                               "end-of-file")
                    return buildup_line

                if self.collapse_join:
                    line = line.lstrip()
                line = buildup_line + line

                # careful: pay attention to line number when incrementing it
                if isinstance(self.current_line, list):
                    self.current_line[1] = self.current_line[1] + 1
                else:
                    self.current_line = [self.current_line,
                                         self.current_line+1]
            # just an ordinary line, read it as usual
            else:
                if line is None:        # eof
                    return None

                # still have to be careful about incrementing the line number!
                if isinstance(self.current_line, list):
                    self.current_line = self.current_line[1] + 1
                else:
                    self.current_line = self.current_line + 1


            # strip whitespace however the client wants (leading and
            # trailing, or one or the other, or neither)
            if self.lstrip_ws and self.rstrip_ws:
                line = line.strip()
            elif self.lstrip_ws:
                line = line.lstrip()
            elif self.rstrip_ws:
                line = line.rstrip()

            # blank line (whether we rstrip'ed or not)? skip to next line
            # if appropriate
            if (line == '' or line == '\n') and self.skip_blanks:
                continue

            if self.join_lines:
                if line[-1] == '\\':
                    buildup_line = line[:-1]
                    continue

                if line[-2:] == '\\\n':
                    buildup_line = line[0:-2] + '\n'
                    continue

            # well, I guess there's some actual content there: return it
            return line

    # readline ()


    def readlines (self):
        """Read and return the list of all logical lines remaining in the
           current file."""

        lines = []
        while 1:
            line = self.readline()
            if line is None:
                return lines
            lines.append (line)


    def unreadline (self, line):
        """Push 'line' (a string) onto an internal buffer that will be
           checked by future 'readline()' calls.  Handy for implementing
           a parser with line-at-a-time lookahead."""

        self.linebuf.append (line)
diff --git a/lib/distutils/distutils/unixccompiler.py b/lib/distutils/distutils/unixccompiler.py
new file mode 100644
index 0000000..c49ac9b
--- /dev/null
+++ b/lib/distutils/distutils/unixccompiler.py
@@ -0,0 +1,346 @@
+"""distutils.unixccompiler
+
+Contains the UnixCCompiler class, a subclass of CCompiler that handles
+the "typical" Unix-style command-line C compiler:
+  * macros defined with -Dname[=value]
+  * macros undefined with -Uname
+  * include search directories specified with -Idir
+  * libraries specified with -lllib
+  * library search directories specified with -Ldir
+  * compile handled by 'cc' (or similar) executable with -c option:
+    compiles .c to .o
+  * link static library handled by 'ar' command (possibly with 'ranlib')
+  * link shared library handled by 'cc -shared'
+"""
+
+__revision__ = "$Id$"
+
+import os, sys, re
+from types import StringType, NoneType
+
+from distutils import sysconfig
+from distutils.dep_util import newer
+from distutils.ccompiler import \
+     CCompiler, gen_preprocess_options, gen_lib_options
+from distutils.errors import \
+     DistutilsExecError, CompileError, LibError, LinkError
+from distutils import log
+
+# XXX Things not currently handled:
+#   * optimization/debug/warning flags; we just use whatever's in Python's
+#     Makefile and live with it.  Is this adequate?  If not, we might
+#     have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
+#     SunCCompiler, and I suspect down that road lies madness.
+#   * even if we don't know a warning flag from an optimization flag,
+#     we need some way for outsiders to feed preprocessor/compiler/linker
+#     flags in to us -- eg. a sysadmin might want to mandate certain flags
+#     via a site config file, or a user might want to set something for
+#     compiling this module distribution only via the setup.py command
+#     line, whatever.  As long as these options come from something on the
+#     current system, they can be as system-dependent as they like, and we
+#     should just happily stuff them into the preprocessor/compiler/linker
+#     options and carry on.
+
+def _darwin_compiler_fixup(compiler_so, cc_args):
+    """
+    This function will strip '-isysroot PATH' and '-arch ARCH' from the
+    compile flags if the user has specified one them in extra_compile_flags.
+
+    This is needed because '-arch ARCH' adds another architecture to the
+    build, without a way to remove an architecture. Furthermore GCC will
+    barf if multiple '-isysroot' arguments are present.
+    """
+    stripArch = stripSysroot = 0
+
+    compiler_so = list(compiler_so)
+    kernel_version = os.uname()[2] # 8.4.3
+    major_version = int(kernel_version.split('.')[0])
+
+    if major_version < 8:
+        # OSX before 10.4.0, these don't support -arch and -isysroot at
+        # all.
+        stripArch = stripSysroot = True
+    else:
+        stripArch = '-arch' in cc_args
+        stripSysroot = '-isysroot' in cc_args
+
+    if stripArch or 'ARCHFLAGS' in os.environ:
+        while 1:
+            try:
+                index = compiler_so.index('-arch')
+                # Strip this argument and the next one:
+                del compiler_so[index:index+2]
+            except ValueError:
+                break
+
+    if 'ARCHFLAGS' in os.environ and not stripArch:
+        # User specified different -arch flags in the environ,
+        # see also distutils.sysconfig
+        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()
+
+    if stripSysroot:
+        try:
+            index = compiler_so.index('-isysroot')
+            # Strip this argument and the next one:
+            del compiler_so[index:index+2]
+        except ValueError:
+            pass
+
+    # Check if the SDK that is used during compilation actually exists,
+    # the universal build requires the usage of a universal SDK and not all
+    # users have that installed by default.
+    sysroot = None
+    if '-isysroot' in cc_args:
+        idx = cc_args.index('-isysroot')
+        sysroot = cc_args[idx+1]
+    elif '-isysroot' in compiler_so:
+        idx = compiler_so.index('-isysroot')
+        sysroot = compiler_so[idx+1]
+
+    if sysroot and not os.path.isdir(sysroot):
+        log.warn("Compiling with an SDK that doesn't seem to exist: %s",
+                sysroot)
+        log.warn("Please check your Xcode installation")
+
+    return compiler_so
+
class UnixCCompiler(CCompiler):
    """CCompiler implementation for the "typical" Unix-style toolchain.

    Compiles with 'cc -c', links shared objects with 'cc -shared', and
    builds static libraries with 'ar' (plus 'ranlib' where configured).
    Platform quirks -- Darwin SDK/arch flag fix-ups, cygwin '.exe'
    suffix, per-platform rpath option spellings -- are handled inline.
    """

    compiler_type = 'unix'

    # These are used by CCompiler in two places: the constructor sets
    # instance attributes 'preprocessor', 'compiler', etc. from them, and
    # 'set_executable()' allows any of these to be set.  The defaults here
    # are pretty generic; they will probably have to be set by an outsider
    # (eg. using information discovered by the sysconfig about building
    # Python extensions).
    executables = {'preprocessor' : None,
                   'compiler'     : ["cc"],
                   'compiler_so'  : ["cc"],
                   'compiler_cxx' : ["cc"],
                   'linker_so'    : ["cc", "-shared"],
                   'linker_exe'   : ["cc"],
                   'archiver'     : ["ar", "-cr"],
                   'ranlib'       : None,
                  }

    # Darwin's 'ar' does not build a symbol table, so ranlib is required.
    if sys.platform[:6] == "darwin":
        executables['ranlib'] = ["ranlib"]

    # Needed for the filename generation methods provided by the base
    # class, CCompiler.  NB. whoever instantiates/uses a particular
    # UnixCCompiler instance should set 'shared_lib_ext' -- we set a
    # reasonable common default here, but it's not necessarily used on all
    # Unices!

    src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".so"
    dylib_lib_extension = ".dylib"
    static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
    if sys.platform == "cygwin":
        exe_extension = ".exe"

    def preprocess(self, source,
                   output_file=None, macros=None, include_dirs=None,
                   extra_preargs=None, extra_postargs=None):
        """Run the preprocessor over 'source', writing to 'output_file'
           (stdout when None).  'macros' and 'include_dirs' are turned
           into -D/-U/-I options; 'extra_preargs'/'extra_postargs' are
           prepended/appended verbatim.  Skips the run when 'output_file'
           exists and is newer than 'source' (unless self.force).
           Raises CompileError if the preprocessor fails."""
        ignore, macros, include_dirs = \
            self._fix_compile_args(None, macros, include_dirs)
        pp_opts = gen_preprocess_options(macros, include_dirs)
        # NOTE(review): 'preprocessor' defaults to None in 'executables';
        # this concatenation assumes a caller configured it via
        # set_executable() first -- confirm.
        pp_args = self.preprocessor + pp_opts
        if output_file:
            pp_args.extend(['-o', output_file])
        if extra_preargs:
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)

        # We need to preprocess: either we're being forced to, or we're
        # generating output to stdout, or there's a target output file and
        # the source file is newer than the target (or the target doesn't
        # exist).
        if self.force or output_file is None or newer(source, output_file):
            if output_file:
                self.mkpath(os.path.dirname(output_file))
            try:
                self.spawn(pp_args)
            except DistutilsExecError, msg:
                raise CompileError, msg

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile the single source file 'src' to object file 'obj'.
           On Darwin the compiler command is first cleaned of -arch and
           -isysroot flags that would conflict with 'cc_args'.
           Raises CompileError if the compiler exits with an error."""
        compiler_so = self.compiler_so
        if sys.platform == 'darwin':
            compiler_so = _darwin_compiler_fixup(compiler_so, cc_args + extra_postargs)
        try:
            self.spawn(compiler_so + cc_args + [src, '-o', obj] +
                       extra_postargs)
        except DistutilsExecError, msg:
            raise CompileError, msg

    def create_static_lib(self, objects, output_libname,
                          output_dir=None, debug=0, target_lang=None):
        """Archive 'objects' into the static library 'output_libname'
           (placed in 'output_dir'), running ranlib afterwards when
           self.ranlib is set.  Skips the work when the library is
           already newer than its objects.  Raises LibError if ranlib
           fails."""
        objects, output_dir = self._fix_object_args(objects, output_dir)

        output_filename = \
            self.library_filename(output_libname, output_dir=output_dir)

        if self._need_link(objects, output_filename):
            self.mkpath(os.path.dirname(output_filename))
            self.spawn(self.archiver +
                       [output_filename] +
                       objects + self.objects)

            # Not many Unices required ranlib anymore -- SunOS 4.x is, I
            # think the only major Unix that does.  Maybe we need some
            # platform intelligence here to skip ranlib if it's not
            # needed -- or maybe Python's configure script took care of
            # it for us, hence the check for leading colon.
            if self.ranlib:
                try:
                    self.spawn(self.ranlib + [output_filename])
                except DistutilsExecError, msg:
                    raise LibError, msg
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def link(self, target_desc, objects,
             output_filename, output_dir=None, libraries=None,
             library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=0, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link 'objects' into 'output_filename'.  'target_desc' selects
           between the executable and shared-object linker commands; for
           target_lang "c++" the driver is swapped for 'compiler_cxx'.
           Skips linking when the output is already up to date.
           Raises LinkError if the linker fails, TypeError on a bad
           'output_dir'."""
        objects, output_dir = self._fix_object_args(objects, output_dir)
        libraries, library_dirs, runtime_library_dirs = \
            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)

        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
                                   libraries)
        if type(output_dir) not in (StringType, NoneType):
            raise TypeError, "'output_dir' must be a string or None"
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            ld_args = (objects + self.objects +
                       lib_opts + ['-o', output_filename])
            if debug:
                ld_args[:0] = ['-g']
            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)
            self.mkpath(os.path.dirname(output_filename))
            try:
                if target_desc == CCompiler.EXECUTABLE:
                    linker = self.linker_exe[:]
                else:
                    linker = self.linker_so[:]
                if target_lang == "c++" and self.compiler_cxx:
                    # skip over environment variable settings if /usr/bin/env
                    # is used to set up the linker's environment.
                    # This is needed on OSX. Note: this assumes that the
                    # normal and C++ compiler have the same environment
                    # settings.
                    i = 0
                    if os.path.basename(linker[0]) == "env":
                        i = 1
                        while '=' in linker[i]:
                            i = i + 1

                    # NOTE(review): indexes compiler_cxx with the same
                    # offset 'i' computed for 'linker'; correct only when
                    # both commands share the same env-prefix shape --
                    # confirm for env-wrapped compilers.
                    linker[i] = self.compiler_cxx[i]

                if sys.platform == 'darwin':
                    linker = _darwin_compiler_fixup(linker, ld_args)

                self.spawn(linker + ld_args)
            except DistutilsExecError, msg:
                raise LinkError, msg
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options() function, in
    # ccompiler.py.

    def library_dir_option(self, dir):
        """Return the -L option that adds 'dir' to the library search path."""
        return "-L" + dir

    def _is_gcc(self, compiler_name):
        """Return true if 'compiler_name' looks like gcc or g++."""
        return "gcc" in compiler_name or "g++" in compiler_name

    def runtime_library_dir_option(self, dir):
        """Return the platform-specific option (string or list of
           strings) that records 'dir' as a runtime library search path
           in the linked output."""
        # XXX Hackish, at the very least.  See Python bug #445902:
        # http://sourceforge.net/tracker/index.php
        #   ?func=detail&aid=445902&group_id=5470&atid=105470
        # Linkers on different platforms need different options to
        # specify that directories need to be added to the list of
        # directories searched for dependencies when a dynamic library
        # is sought.  GCC has to be told to pass the -R option through
        # to the linker, whereas other compilers just know this.
        # Other compilers may need something slightly different.  At
        # this time, there's no way to determine this information from
        # the configuration data stored in the Python installation, so
        # we use this hack.
        compiler = os.path.basename(sysconfig.get_config_var("CC"))
        if sys.platform[:6] == "darwin":
            # MacOSX's linker doesn't understand the -R flag at all
            return "-L" + dir
        elif sys.platform[:5] == "hp-ux":
            if self._is_gcc(compiler):
                return ["-Wl,+s", "-L" + dir]
            return ["+s", "-L" + dir]
        elif sys.platform[:7] == "irix646" or sys.platform[:6] == "osf1V5":
            return ["-rpath", dir]
        elif self._is_gcc(compiler):
            return "-Wl,-R" + dir
        else:
            return "-R" + dir

    def library_option(self, lib):
        """Return the -l option that links against library 'lib'."""
        return "-l" + lib

    def find_library_file(self, dirs, lib, debug=0):
        """Search 'dirs' for library 'lib' and return the full path of
           the first match, preferring .dylib, then shared (.so), then
           static (.a); return None when nothing is found.  On Darwin,
           system directories are remapped under the SDK root when
           CFLAGS carries '-isysroot'."""
        shared_f = self.library_filename(lib, lib_type='shared')
        dylib_f = self.library_filename(lib, lib_type='dylib')
        static_f = self.library_filename(lib, lib_type='static')

        if sys.platform == 'darwin':
            # On OSX users can specify an alternate SDK using
            # '-isysroot', calculate the SDK root if it is specified
            # (and use it further on)
            cflags = sysconfig.get_config_var('CFLAGS')
            m = re.search(r'-isysroot\s+(\S+)', cflags)
            if m is None:
                sysroot = '/'
            else:
                sysroot = m.group(1)



        for dir in dirs:
            shared = os.path.join(dir, shared_f)
            dylib = os.path.join(dir, dylib_f)
            static = os.path.join(dir, static_f)

            # System locations live inside the SDK, not on the root
            # filesystem ('/usr/local/' is deliberately excluded).
            if sys.platform == 'darwin' and (
                dir.startswith('/System/') or (
                dir.startswith('/usr/') and not dir.startswith('/usr/local/'))):

                shared = os.path.join(sysroot, dir[1:], shared_f)
                dylib = os.path.join(sysroot, dir[1:], dylib_f)
                static = os.path.join(sysroot, dir[1:], static_f)

            # We're second-guessing the linker here, with not much hard
            # data to go on: GCC seems to prefer the shared library, so I'm
            # assuming that *all* Unix C compilers do.  And of course I'm
            # ignoring even GCC's "-static" option.  So sue me.
            if os.path.exists(dylib):
                return dylib
            elif os.path.exists(shared):
                return shared
            elif os.path.exists(static):
                return static

        # Oops, didn't find it in *any* of 'dirs'
        return None
diff --git a/lib/distutils/distutils/util.py b/lib/distutils/distutils/util.py
new file mode 100644
index 0000000..6c49f0b
--- /dev/null
+++ b/lib/distutils/distutils/util.py
@@ -0,0 +1,567 @@
+"""distutils.util
+
+Miscellaneous utility functions -- anything that doesn't fit into
+one of the other *util.py modules.
+"""
+
+__revision__ = "$Id$"
+
+import sys, os, string, re
+from distutils.errors import DistutilsPlatformError
+from distutils.dep_util import newer
+from distutils.spawn import spawn
+from distutils import log
+from distutils.errors import DistutilsByteCompileError
+
+def get_platform ():
+    """Return a string that identifies the current platform.  This is used
+    mainly to distinguish platform-specific build directories and
+    platform-specific built distributions.  Typically includes the OS name
+    and version and the architecture (as supplied by 'os.uname()'),
+    although the exact information included depends on the OS; eg. for IRIX
+    the architecture isn't particularly important (IRIX only runs on SGI
+    hardware), but for Linux the kernel version isn't particularly
+    important.
+
+    Examples of returned values:
+       linux-i586
+       linux-alpha (?)
+       solaris-2.6-sun4u
+       irix-5.3
+       irix64-6.2
+
+    Windows will return one of:
+       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
+       win-ia64 (64bit Windows on Itanium)
+       win32 (all others - specifically, sys.platform is returned)
+
+    For other non-POSIX platforms, currently just returns 'sys.platform'.
+    """
+    if os.name == 'nt':
+        # sniff sys.version for architecture.
+        prefix = " bit ("
+        i = string.find(sys.version, prefix)
+        if i == -1:
+            return sys.platform
+        j = string.find(sys.version, ")", i)
+        look = sys.version[i+len(prefix):j].lower()
+        if look=='amd64':
+            return 'win-amd64'
+        if look=='itanium':
+            return 'win-ia64'
+        return sys.platform
+
+    if os.name != "posix" or not hasattr(os, 'uname'):
+        # XXX what about the architecture? NT is Intel or Alpha,
+        # Mac OS is M68k or PPC, etc.
+        return sys.platform
+
+    # Try to distinguish various flavours of Unix
+
+    (osname, host, release, version, machine) = os.uname()
+
+    # Convert the OS name to lowercase, remove '/' characters
+    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
+    osname = string.lower(osname)
+    osname = string.replace(osname, '/', '')
+    machine = string.replace(machine, ' ', '_')
+    machine = string.replace(machine, '/', '-')
+
+    if osname[:5] == "linux":
+        # At least on Linux/Intel, 'machine' is the processor --
+        # i386, etc.
+        # XXX what about Alpha, SPARC, etc?
+        return  "%s-%s" % (osname, machine)
+    elif osname[:5] == "sunos":
+        if release[0] >= "5":           # SunOS 5 == Solaris 2
+            osname = "solaris"
+            release = "%d.%s" % (int(release[0]) - 3, release[2:])
+        # fall through to standard osname-release-machine representation
+    elif osname[:4] == "irix":              # could be "irix64"!
+        return "%s-%s" % (osname, release)
+    elif osname[:3] == "aix":
+        return "%s-%s.%s" % (osname, version, release)
+    elif osname[:6] == "cygwin":
+        osname = "cygwin"
+        # Trim the Cygwin release string down to its leading digits/dots.
+        rel_re = re.compile (r'[\d.]+')
+        m = rel_re.match(release)
+        if m:
+            release = m.group()
+    elif osname[:6] == "darwin":
+        #
+        # For our purposes, we'll assume that the system version from
+        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
+        # to. This makes the compatibility story a bit more sane because the
+        # machine is going to compile and link as if it were
+        # MACOSX_DEPLOYMENT_TARGET.
+        from distutils.sysconfig import get_config_vars
+        cfgvars = get_config_vars()
+
+        # May be None or empty; 'macrelease' computed below is the fallback.
+        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
+
+        if 1:
+            # Always calculate the release of the running machine,
+            # needed to determine if we can build fat binaries or not.
+            # (The 'if 1:' guard is always true; the block is kept as-is.)
+
+            macrelease = macver
+            # Get the system version. Reading this plist is a documented
+            # way to get the system version (see the documentation for
+            # the Gestalt Manager)
+            try:
+                f = open('/System/Library/CoreServices/SystemVersion.plist')
+            except IOError:
+                # We're on a plain darwin box, fall back to the default
+                # behaviour.
+                pass
+            else:
+                try:
+                    m = re.search(
+                            r'<key>ProductUserVisibleVersion</key>\s*' +
+                            r'<string>(.*?)</string>', f.read())
+                    if m is not None:
+                        # Keep only the major.minor part, e.g. "10.6".
+                        macrelease = '.'.join(m.group(1).split('.')[:2])
+                    # else: fall back to the default behaviour
+                finally:
+                    f.close()
+
+        if not macver:
+            macver = macrelease
+
+        if macver:
+            # Redundant re-import (already imported above); harmless.
+            from distutils.sysconfig import get_config_vars
+            release = macver
+            osname = "macosx"
+
+            if (macrelease + '.') >= '10.4.' and \
+                    '-arch' in get_config_vars().get('CFLAGS', '').strip():
+                # The universal build will build fat binaries, but not on
+                # systems before 10.4
+                #
+                # Try to detect 4-way universal builds, those have machine-type
+                # 'universal' instead of 'fat'.
+
+                machine = 'fat'
+                cflags = get_config_vars().get('CFLAGS')
+
+                # Map the sorted set of '-arch' flags to a machine name.
+                archs = re.findall('-arch\s+(\S+)', cflags)
+                archs = tuple(sorted(set(archs)))
+
+                if len(archs) == 1:
+                    machine = archs[0]
+                elif archs == ('i386', 'ppc'):
+                    machine = 'fat'
+                elif archs == ('i386', 'x86_64'):
+                    machine = 'intel'
+                elif archs == ('i386', 'ppc', 'x86_64'):
+                    machine = 'fat3'
+                elif archs == ('ppc64', 'x86_64'):
+                    machine = 'fat64'
+                elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
+                    machine = 'universal'
+                else:
+                    raise ValueError(
+                       "Don't know machine value for archs=%r"%(archs,))
+
+            elif machine == 'i386':
+                # On OSX the machine type returned by uname is always the
+                # 32-bit variant, even if the executable architecture is
+                # the 64-bit variant
+                if sys.maxint >= 2**32:
+                    machine = 'x86_64'
+
+            elif machine in ('PowerPC', 'Power_Macintosh'):
+                # Pick a sane name for the PPC architecture.
+                machine = 'ppc'
+
+                # See 'i386' case
+                if sys.maxint >= 2**32:
+                    machine = 'ppc64'
+
+    return "%s-%s-%s" % (osname, release, machine)
+
+# get_platform ()
+
+
+def convert_path (pathname):
+    """Return 'pathname' as a name that will work on the native filesystem,
+    i.e. split it on '/' and put it back together again using the current
+    directory separator.  Needed because filenames in the setup script are
+    always supplied in Unix style, and have to be converted to the local
+    convention before we can actually use them in the filesystem.  Raises
+    ValueError on non-Unix-ish systems if 'pathname' either starts or
+    ends with a slash.
+    """
+    if os.sep == '/':
+        # Native separator is already '/': nothing to convert.
+        return pathname
+    if not pathname:
+        return pathname
+    if pathname[0] == '/':
+        raise ValueError, "path '%s' cannot be absolute" % pathname
+    if pathname[-1] == '/':
+        raise ValueError, "path '%s' cannot end with '/'" % pathname
+
+    paths = string.split(pathname, '/')
+    # Drop '.' components -- they are no-ops in a relative path.
+    while '.' in paths:
+        paths.remove('.')
+    if not paths:
+        return os.curdir
+    return os.path.join(*paths)
+
+# convert_path ()
+
+
+def change_root (new_root, pathname):
+    """Return 'pathname' with 'new_root' prepended.  If 'pathname' is
+    relative, this is equivalent to "os.path.join(new_root,pathname)".
+    Otherwise, it requires making 'pathname' relative and then joining the
+    two, which is tricky on DOS/Windows and Mac OS.
+
+    Raises DistutilsPlatformError on platforms other than posix, nt or os2.
+    """
+    if os.name == 'posix':
+        if not os.path.isabs(pathname):
+            return os.path.join(new_root, pathname)
+        else:
+            # Strip the leading '/' so join doesn't discard 'new_root'.
+            return os.path.join(new_root, pathname[1:])
+
+    elif os.name == 'nt':
+        (drive, path) = os.path.splitdrive(pathname)
+        # NOTE(review): path[0] assumes splitdrive left a non-empty path.
+        if path[0] == '\\':
+            path = path[1:]
+        return os.path.join(new_root, path)
+
+    elif os.name == 'os2':
+        (drive, path) = os.path.splitdrive(pathname)
+        if path[0] == os.sep:
+            path = path[1:]
+        return os.path.join(new_root, path)
+
+    else:
+        raise DistutilsPlatformError, \
+              "nothing known about platform '%s'" % os.name
+
+
+# Module-level flag: set to 1 once check_environ() has done its work,
+# so repeated calls are cheap no-ops.
+_environ_checked = 0
+def check_environ ():
+    """Ensure that 'os.environ' has all the environment variables we
+    guarantee that users can use in config files, command-line options,
+    etc.  Currently this includes:
+      HOME - user's home directory (Unix only)
+      PLAT - description of the current platform, including hardware
+             and OS (see 'get_platform()')
+    """
+    global _environ_checked
+    if _environ_checked:
+        return
+
+    if os.name == 'posix' and 'HOME' not in os.environ:
+        import pwd
+        # pwd entry field 5 is the home directory.
+        os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]
+
+    if 'PLAT' not in os.environ:
+        os.environ['PLAT'] = get_platform()
+
+    _environ_checked = 1
+
+
+def subst_vars (s, local_vars):
+    """Perform shell/Perl-style variable substitution on 'string'.  Every
+    occurrence of '$' followed by a name is considered a variable, and
+    variable is substituted by the value found in the 'local_vars'
+    dictionary, or in 'os.environ' if it's not in 'local_vars'.
+    'os.environ' is first checked/augmented to guarantee that it contains
+    certain values: see 'check_environ()'.  Raise ValueError for any
+    variables not found in either 'local_vars' or 'os.environ'.
+    """
+    check_environ()
+    def _subst (match, local_vars=local_vars):
+        var_name = match.group(1)
+        if var_name in local_vars:
+            return str(local_vars[var_name])
+        else:
+            # May raise KeyError; converted to ValueError below.
+            return os.environ[var_name]
+
+    try:
+        return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s)
+    except KeyError, var:
+        raise ValueError, "invalid variable '$%s'" % var
+
+# subst_vars ()
+
+
+def grok_environment_error (exc, prefix="error: "):
+    """Generate a useful error message from an EnvironmentError (IOError or
+    OSError) exception object.  Handles Python 1.5.1 and 1.5.2 styles, and
+    does what it can to deal with exception objects that don't have a
+    filename (which happens when the error is due to a two-file operation,
+    such as 'rename()' or 'link()'.  Returns the error message as a string
+    prefixed with 'prefix'.
+    """
+    # check for Python 1.5.2-style {IO,OS}Error exception objects
+    if hasattr(exc, 'filename') and hasattr(exc, 'strerror'):
+        if exc.filename:
+            error = prefix + "%s: %s" % (exc.filename, exc.strerror)
+        else:
+            # two-argument functions in posix module don't
+            # include the filename in the exception object!
+            error = prefix + "%s" % exc.strerror
+    else:
+        # Python 1.5.1 style: exception is a tuple; last item is the message.
+        error = prefix + str(exc[-1])
+
+    return error
+
+
+# Needed by 'split_quoted()'
+# Compiled lazily by _init_regex() on first use of split_quoted().
+_wordchars_re = _squote_re = _dquote_re = None
+def _init_regex():
+    # Compile the word/quote-matching patterns used by split_quoted().
+    global _wordchars_re, _squote_re, _dquote_re
+    _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
+    _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
+    _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
+
+def split_quoted (s):
+    """Split a string up according to Unix shell-like rules for quotes and
+    backslashes.  In short: words are delimited by spaces, as long as those
+    spaces are not escaped by a backslash, or inside a quoted string.
+    Single and double quotes are equivalent, and the quote characters can
+    be backslash-escaped.  The backslash is stripped from any two-character
+    escape sequence, leaving only the escaped character.  The quote
+    characters are stripped from any quoted string.  Returns a list of
+    words.
+    """
+
+    # This is a nice algorithm for splitting up a single string, since it
+    # doesn't require character-by-character examination.  It was a little
+    # bit of a brain-bender to get it working right, though...
+    if _wordchars_re is None: _init_regex()
+
+    s = string.strip(s)
+    words = []
+    pos = 0
+
+    while s:
+        # Consume a run of ordinary (unquoted, unescaped) characters.
+        m = _wordchars_re.match(s, pos)
+        end = m.end()
+        if end == len(s):
+            words.append(s[:end])
+            break
+
+        if s[end] in string.whitespace: # unescaped, unquoted whitespace: now
+            words.append(s[:end])       # we definitely have a word delimiter
+            s = string.lstrip(s[end:])
+            pos = 0
+
+        elif s[end] == '\\':            # preserve whatever is being escaped;
+                                        # will become part of the current word
+            s = s[:end] + s[end+1:]
+            pos = end+1
+
+        else:
+            if s[end] == "'":           # slurp singly-quoted string
+                m = _squote_re.match(s, end)
+            elif s[end] == '"':         # slurp doubly-quoted string
+                m = _dquote_re.match(s, end)
+            else:
+                raise RuntimeError, \
+                      "this can't happen (bad char '%c')" % s[end]
+
+            if m is None:
+                raise ValueError, \
+                      "bad string (mismatched %s quotes?)" % s[end]
+
+            (beg, end) = m.span()
+            # Strip the two quote characters; resume after the quoted text
+            # (hence the -2 adjustment to 'pos').
+            s = s[:beg] + s[beg+1:end-1] + s[end:]
+            pos = m.end() - 2
+
+        if pos >= len(s):
+            words.append(s)
+            break
+
+    return words
+
+# split_quoted ()
+
+
+def execute (func, args, msg=None, verbose=0, dry_run=0):
+    """Perform some action that affects the outside world (eg.  by
+    writing to the filesystem).  Such actions are special because they
+    are disabled by the 'dry_run' flag.  This method takes care of all
+    that bureaucracy for you; all you have to do is supply the
+    function to call and an argument tuple for it (to embody the
+    "external action" being performed), and an optional message to
+    print.
+    """
+    if msg is None:
+        # Default message: "funcname(arg1, arg2, ...)".
+        msg = "%s%r" % (func.__name__, args)
+        if msg[-2:] == ',)':        # correct for singleton tuple
+            msg = msg[0:-2] + ')'
+
+    log.info(msg)
+    if not dry_run:
+        func(*args)
+
+
+def strtobool (val):
+    """Convert a string representation of truth to true (1) or false (0).
+
+    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
+    are 'n', 'no', 'f', 'false', 'off', and '0'.  Raises ValueError if
+    'val' is anything else.
+    """
+    # Comparison is case-insensitive.
+    val = string.lower(val)
+    if val in ('y', 'yes', 't', 'true', 'on', '1'):
+        return 1
+    elif val in ('n', 'no', 'f', 'false', 'off', '0'):
+        return 0
+    else:
+        raise ValueError, "invalid truth value %r" % (val,)
+
+
+def byte_compile (py_files,
+                  optimize=0, force=0,
+                  prefix=None, base_dir=None,
+                  verbose=1, dry_run=0,
+                  direct=None):
+    """Byte-compile a collection of Python source files to either .pyc
+    or .pyo files in the same directory.  'py_files' is a list of files
+    to compile; any files that don't end in ".py" are silently skipped.
+    'optimize' must be one of the following:
+      0 - don't optimize (generate .pyc)
+      1 - normal optimization (like "python -O")
+      2 - extra optimization (like "python -OO")
+    If 'force' is true, all files are recompiled regardless of
+    timestamps.
+
+    The source filename encoded in each bytecode file defaults to the
+    filenames listed in 'py_files'; you can modify these with 'prefix' and
+    'basedir'.  'prefix' is a string that will be stripped off of each
+    source filename, and 'base_dir' is a directory name that will be
+    prepended (after 'prefix' is stripped).  You can supply either or both
+    (or neither) of 'prefix' and 'base_dir', as you wish.
+
+    If 'dry_run' is true, doesn't actually do anything that would
+    affect the filesystem.
+
+    Byte-compilation is either done directly in this interpreter process
+    with the standard py_compile module, or indirectly by writing a
+    temporary script and executing it.  Normally, you should let
+    'byte_compile()' figure out to use direct compilation or not (see
+    the source for details).  The 'direct' flag is used by the script
+    generated in indirect mode; unless you know what you're doing, leave
+    it set to None.
+
+    Raises DistutilsByteCompileError if byte-compilation is disabled
+    via sys.dont_write_bytecode.
+    """
+    # nothing is done if sys.dont_write_bytecode is True
+    if sys.dont_write_bytecode:
+        raise DistutilsByteCompileError('byte-compiling is disabled.')
+
+    # First, if the caller didn't force us into direct or indirect mode,
+    # figure out which mode we should be in.  We take a conservative
+    # approach: choose direct mode *only* if the current interpreter is
+    # in debug mode and optimize is 0.  If we're not in debug mode (-O
+    # or -OO), we don't know which level of optimization this
+    # interpreter is running with, so we can't do direct
+    # byte-compilation and be certain that it's the right thing.  Thus,
+    # always compile indirectly if the current interpreter is in either
+    # optimize mode, or if either optimization level was requested by
+    # the caller.
+    if direct is None:
+        direct = (__debug__ and optimize == 0)
+
+    # "Indirect" byte-compilation: write a temporary script and then
+    # run it with the appropriate flags.
+    if not direct:
+        try:
+            from tempfile import mkstemp
+            (script_fd, script_name) = mkstemp(".py")
+        except ImportError:
+            # Fall back for tempfile modules that lack mkstemp.
+            from tempfile import mktemp
+            (script_fd, script_name) = None, mktemp(".py")
+        log.info("writing byte-compilation script '%s'", script_name)
+        if not dry_run:
+            if script_fd is not None:
+                script = os.fdopen(script_fd, "w")
+            else:
+                script = open(script_name, "w")
+
+            # The generated script calls byte_compile() back with direct=1.
+            script.write("""\
+from distutils.util import byte_compile
+files = [
+""")
+
+            # XXX would be nice to write absolute filenames, just for
+            # safety's sake (script should be more robust in the face of
+            # chdir'ing before running it).  But this requires abspath'ing
+            # 'prefix' as well, and that breaks the hack in build_lib's
+            # 'byte_compile()' method that carefully tacks on a trailing
+            # slash (os.sep really) to make sure the prefix here is "just
+            # right".  This whole prefix business is rather delicate -- the
+            # problem is that it's really a directory, but I'm treating it
+            # as a dumb string, so trailing slashes and so forth matter.
+
+            #py_files = map(os.path.abspath, py_files)
+            #if prefix:
+            #    prefix = os.path.abspath(prefix)
+
+            script.write(string.join(map(repr, py_files), ",\n") + "]\n")
+            script.write("""
+byte_compile(files, optimize=%r, force=%r,
+             prefix=%r, base_dir=%r,
+             verbose=%r, dry_run=0,
+             direct=1)
+""" % (optimize, force, prefix, base_dir, verbose))
+
+            script.close()
+
+        # Run the script with the -O/-OO flag matching 'optimize'.
+        cmd = [sys.executable, script_name]
+        if optimize == 1:
+            cmd.insert(1, "-O")
+        elif optimize == 2:
+            cmd.insert(1, "-OO")
+        spawn(cmd, dry_run=dry_run)
+        execute(os.remove, (script_name,), "removing %s" % script_name,
+                dry_run=dry_run)
+
+    # "Direct" byte-compilation: use the py_compile module to compile
+    # right here, right now.  Note that the script generated in indirect
+    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
+    # cross-process recursion.  Hey, it works!
+    else:
+        from py_compile import compile
+
+        for file in py_files:
+            if file[-3:] != ".py":
+                # This lets us be lazy and not filter filenames in
+                # the "install_lib" command.
+                continue
+
+            # Terminology from the py_compile module:
+            #   cfile - byte-compiled file
+            #   dfile - purported source filename (same as 'file' by default)
+            # ".pyc" under a normal run, ".pyo" under -O/-OO.
+            cfile = file + (__debug__ and "c" or "o")
+            dfile = file
+            if prefix:
+                if file[:len(prefix)] != prefix:
+                    raise ValueError, \
+                          ("invalid prefix: filename %r doesn't start with %r"
+                           % (file, prefix))
+                dfile = dfile[len(prefix):]
+            if base_dir:
+                dfile = os.path.join(base_dir, dfile)
+
+            cfile_base = os.path.basename(cfile)
+            # 'direct' is always true on this branch; guard kept as-is.
+            if direct:
+                if force or newer(file, cfile):
+                    log.info("byte-compiling %s to %s", file, cfile_base)
+                    if not dry_run:
+                        compile(file, cfile, dfile)
+                else:
+                    log.debug("skipping byte-compilation of %s to %s",
+                              file, cfile_base)
+
+# byte_compile ()
+
+def rfc822_escape (header):
+    """Return a version of the string escaped for inclusion in an
+    RFC-822 header, by ensuring there are 8 spaces after each newline.
+    """
+    lines = string.split(header, '\n')
+    header = string.join(lines, '\n' + 8*' ')
+    return header
diff --git a/lib/distutils/distutils/version.py b/lib/distutils/distutils/version.py
new file mode 100644
index 0000000..0fb5b6e
--- /dev/null
+++ b/lib/distutils/distutils/version.py
@@ -0,0 +1,299 @@
+#
+# distutils/version.py
+#
+# Implements multiple version numbering conventions for the
+# Python Module Distribution Utilities.
+#
+# $Id$
+#
+
+"""Provides classes to represent module version numbers (one class for
+each style of version numbering).  There are currently two such classes
+implemented: StrictVersion and LooseVersion.
+
+Every version number class implements the following interface:
+  * the 'parse' method takes a string and parses it to some internal
+    representation; if the string is an invalid version number,
+    'parse' raises a ValueError exception
+  * the class constructor takes an optional string argument which,
+    if supplied, is passed to 'parse'
+  * __str__ reconstructs the string that was passed to 'parse' (or
+    an equivalent string -- ie. one that will generate an equivalent
+    version number instance)
+  * __repr__ generates Python code to recreate the version number instance
+  * __cmp__ compares the current instance with either another instance
+    of the same class or a string (which will be parsed to an instance
+    of the same class, thus must follow the same rules)
+"""
+
+import string, re
+from types import StringType
+
+class Version:
+    """Abstract base class for version numbering classes.  Just provides
+    constructor (__init__) and reproducer (__repr__), because those
+    seem to be the same for all version numbering classes.
+
+    Subclasses must implement 'parse' (see the interface comment below).
+    """
+
+    def __init__ (self, vstring=None):
+        # Defer to the subclass' parse() when a version string is given.
+        if vstring:
+            self.parse(vstring)
+
+    def __repr__ (self):
+        return "%s ('%s')" % (self.__class__.__name__, str(self))
+
+
+# Interface for version-number classes -- must be implemented
+# by the following classes (the concrete ones -- Version should
+# be treated as an abstract class).
+#    __init__ (string) - create and take same action as 'parse'
+#                        (string parameter is optional)
+#    parse (string)    - convert a string representation to whatever
+#                        internal representation is appropriate for
+#                        this style of version numbering
+#    __str__ (self)    - convert back to a string; should be very similar
+#                        (if not identical to) the string supplied to parse
+#    __repr__ (self)   - generate Python code to recreate
+#                        the instance
+#    __cmp__ (self, other) - compare two version numbers ('other' may
+#                        be an unparsed version string, or another
+#                        instance of your version class)
+
+
+class StrictVersion (Version):
+
+    """Version numbering for anal retentives and software idealists.
+    Implements the standard interface for version number classes as
+    described above.  A version number consists of two or three
+    dot-separated numeric components, with an optional "pre-release" tag
+    on the end.  The pre-release tag consists of the letter 'a' or 'b'
+    followed by a number.  If the numeric components of two version
+    numbers are equal, then one with a pre-release tag will always
+    be deemed earlier (lesser) than one without.
+
+    The following are valid version numbers (shown in the order that
+    would be obtained by sorting according to the supplied cmp function):
+
+        0.4       0.4.0  (these two are equivalent)
+        0.4.1
+        0.5a1
+        0.5b3
+        0.5
+        0.9.6
+        1.0
+        1.0.4a3
+        1.0.4b1
+        1.0.4
+
+    The following are examples of invalid version numbers:
+
+        1
+        2.7.2.2
+        1.3.a4
+        1.3pl1
+        1.3c4
+
+    The rationale for this version numbering system will be explained
+    in the distutils documentation.
+    """
+
+    # major "." minor ["." patch] [("a"|"b") prerelease_num]
+    version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$',
+                            re.VERBOSE)
+
+
+    def parse (self, vstring):
+        """Parse 'vstring' into self.version (3-tuple of ints) and
+        self.prerelease (('a'|'b', int) or None).  Raises ValueError
+        if the string does not match the strict syntax."""
+        match = self.version_re.match(vstring)
+        if not match:
+            raise ValueError, "invalid version number '%s'" % vstring
+
+        (major, minor, patch, prerelease, prerelease_num) = \
+            match.group(1, 2, 4, 5, 6)
+
+        if patch:
+            self.version = tuple(map(string.atoi, [major, minor, patch]))
+        else:
+            # Missing patch level is treated as 0, so "0.4" == "0.4.0".
+            self.version = tuple(map(string.atoi, [major, minor]) + [0])
+
+        if prerelease:
+            self.prerelease = (prerelease[0], string.atoi(prerelease_num))
+        else:
+            self.prerelease = None
+
+
+    def __str__ (self):
+
+        # Omit a trailing ".0" patch level for compactness.
+        if self.version[2] == 0:
+            vstring = string.join(map(str, self.version[0:2]), '.')
+        else:
+            vstring = string.join(map(str, self.version), '.')
+
+        if self.prerelease:
+            vstring = vstring + self.prerelease[0] + str(self.prerelease[1])
+
+        return vstring
+
+
+    def __cmp__ (self, other):
+        # 'other' may be a string; parse it into a StrictVersion first.
+        if isinstance(other, StringType):
+            other = StrictVersion(other)
+
+        compare = cmp(self.version, other.version)
+        if (compare == 0):              # have to compare prerelease
+
+            # case 1: neither has prerelease; they're equal
+            # case 2: self has prerelease, other doesn't; other is greater
+            # case 3: self doesn't have prerelease, other does: self is greater
+            # case 4: both have prerelease: must compare them!
+
+            if (not self.prerelease and not other.prerelease):
+                return 0
+            elif (self.prerelease and not other.prerelease):
+                return -1
+            elif (not self.prerelease and other.prerelease):
+                return 1
+            elif (self.prerelease and other.prerelease):
+                return cmp(self.prerelease, other.prerelease)
+
+        else:                           # numeric versions don't match --
+            return compare              # prerelease stuff doesn't matter
+
+
+# end class StrictVersion
+
+
+# The rules according to Greg Stein:
+# 1) a version number has 1 or more numbers separated by a period or by
+#    sequences of letters. If only periods, then these are compared
+#    left-to-right to determine an ordering.
+# 2) sequences of letters are part of the tuple for comparison and are
+#    compared lexicographically
+# 3) recognize the numeric components may have leading zeroes
+#
+# The LooseVersion class below implements these rules: a version number
+# string is split up into a tuple of integer and string components, and
+# comparison is a simple tuple comparison.  This means that version
+# numbers behave in a predictable and obvious way, but a way that might
+# not necessarily be how people *want* version numbers to behave.  There
+# wouldn't be a problem if people could stick to purely numeric version
+# numbers: just split on period and compare the numbers as tuples.
+# However, people insist on putting letters into their version numbers;
+# the most common purpose seems to be:
+#   - indicating a "pre-release" version
+#     ('alpha', 'beta', 'a', 'b', 'pre', 'p')
+#   - indicating a post-release patch ('p', 'pl', 'patch')
+# but of course this can't cover all version number schemes, and there's
+# no way to know what a programmer means without asking him.
+#
+# The problem is what to do with letters (and other non-numeric
+# characters) in a version number.  The current implementation does the
+# obvious and predictable thing: keep them as strings and compare
+# lexically within a tuple comparison.  This has the desired effect if
+# an appended letter sequence implies something "post-release":
+# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002".
+#
+# However, if letters in a version number imply a pre-release version,
+# the "obvious" thing isn't correct.  Eg. you would expect that
+# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison
+# implemented here, this just isn't so.
+#
+# Two possible solutions come to mind.  The first is to tie the
+# comparison algorithm to a particular set of semantic rules, as has
+# been done in the StrictVersion class above.  This works great as long
+# as everyone can go along with bondage and discipline.  Hopefully a
+# (large) subset of Python module programmers will agree that the
+# particular flavour of bondage and discipline provided by StrictVersion
+# provides enough benefit to be worth using, and will submit their
+# version numbering scheme to its domination.  The free-thinking
+# anarchists in the lot will never give in, though, and something needs
+# to be done to accommodate them.
+#
+# Perhaps a "moderately strict" version class could be implemented that
+# lets almost anything slide (syntactically), and makes some heuristic
+# assumptions about non-digits in version number strings.  This could
+# sink into special-case-hell, though; if I was as talented and
+# idiosyncratic as Larry Wall, I'd go ahead and implement a class that
+# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is
+# just as happy dealing with things like "2g6" and "1.13++".  I don't
+# think I'm smart enough to do it right though.
+#
+# In any case, I've coded the test suite for this module (see
+# ../test/test_version.py) specifically to fail on things like comparing
+# "1.2a2" and "1.2".  That's not because the *code* is doing anything
+# wrong, it's because the simple, obvious design doesn't match my
+# complicated, hairy expectations for real-world version numbers.  It
+# would be a snap to fix the test suite to say, "Yep, LooseVersion does
+# the Right Thing" (ie. the code matches the conception).  But I'd rather
+# have a conception that matches common notions about version numbers.
+
+class LooseVersion (Version):
+
+    """Version numbering for anarchists and software realists.
+    Implements the standard interface for version number classes as
+    described above.  A version number consists of a series of numbers,
+    separated by either periods or strings of letters.  When comparing
+    version numbers, the numeric components will be compared
+    numerically, and the alphabetic components lexically.  The following
+    are all valid version numbers, in no particular order:
+
+        1.5.1
+        1.5.2b2
+        161
+        3.10a
+        8.02
+        3.4j
+        1996.07.12
+        3.2.pl0
+        3.1.1.6
+        2g6
+        11g
+        0.960923
+        2.2beta29
+        1.13++
+        5.5.kw
+        2.0b1pl0
+
+    In fact, there is no such thing as an invalid version number under
+    this scheme; the rules for comparison are simple and predictable,
+    but may not always give the results you want (for some definition
+    of "want").
+    """
+
+    # Splits a version string into runs of digits, runs of letters,
+    # and '.' separators (which are discarded by parse()).
+    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
+
+    def __init__ (self, vstring=None):
+        if vstring:
+            self.parse(vstring)
+
+
+    def parse (self, vstring):
+        """Parse 'vstring' into self.version, a list of ints and strings."""
+        # I've given up on thinking I can reconstruct the version string
+        # from the parsed tuple -- so I just store the string here for
+        # use by __str__
+        self.vstring = vstring
+        components = filter(lambda x: x and x != '.',
+                            self.component_re.split(vstring))
+        for i in range(len(components)):
+            try:
+                # Numeric components compare numerically ...
+                components[i] = int(components[i])
+            except ValueError:
+                # ... alphabetic components stay strings and compare lexically.
+                pass
+
+        self.version = components
+
+
+    def __str__ (self):
+        return self.vstring
+
+
+    def __repr__ (self):
+        return "LooseVersion ('%s')" % str(self)
+
+
+    def __cmp__ (self, other):
+        # 'other' may be a string; parse it into a LooseVersion first.
+        if isinstance(other, StringType):
+            other = LooseVersion(other)
+
+        return cmp(self.version, other.version)
+
+
+# end class LooseVersion
diff --git a/lib/distutils/distutils/versionpredicate.py b/lib/distutils/distutils/versionpredicate.py
new file mode 100644
index 0000000..ba8b6c0
--- /dev/null
+++ b/lib/distutils/distutils/versionpredicate.py
@@ -0,0 +1,164 @@
+"""Module for parsing and testing package version predicate strings.
+"""
+import re
+import distutils.version
+import operator
+
+
+re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)")
+# (package) (rest)
+
+re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses
+re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
+# (comp) (version)
+
+
+def splitUp(pred):
+    """Parse a single version comparison.
+
+    Return (comparison string, StrictVersion)
+    """
+    res = re_splitComparison.match(pred)
+    if not res:
+        raise ValueError("bad package restriction syntax: %r" % pred)
+    comp, verStr = res.groups()
+    return (comp, distutils.version.StrictVersion(verStr))
+
+compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq,
+           ">": operator.gt, ">=": operator.ge, "!=": operator.ne}
+
+class VersionPredicate:
+    """Parse and test package version predicates.
+
+    >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)')
+
+    The `name` attribute provides the full dotted name that is given::
+
+    >>> v.name
+    'pyepat.abc'
+
+    The str() of a `VersionPredicate` provides a normalized
+    human-readable version of the expression::
+
+    >>> print v
+    pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3)
+
+    The `satisfied_by()` method can be used to determine with a given
+    version number is included in the set described by the version
+    restrictions::
+
+    >>> v.satisfied_by('1.1')
+    True
+    >>> v.satisfied_by('1.4')
+    True
+    >>> v.satisfied_by('1.0')
+    False
+    >>> v.satisfied_by('4444.4')
+    False
+    >>> v.satisfied_by('1555.1b3')
+    False
+
+    `VersionPredicate` is flexible in accepting extra whitespace::
+
+    >>> v = VersionPredicate(' pat( ==  0.1  )  ')
+    >>> v.name
+    'pat'
+    >>> v.satisfied_by('0.1')
+    True
+    >>> v.satisfied_by('0.2')
+    False
+
+    If any version numbers passed in do not conform to the
+    restrictions of `StrictVersion`, a `ValueError` is raised::
+
+    >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)')
+    Traceback (most recent call last):
+      ...
+    ValueError: invalid version number '1.2zb3'
+
+    It the module or package name given does not conform to what's
+    allowed as a legal module or package name, `ValueError` is
+    raised::
+
+    >>> v = VersionPredicate('foo-bar')
+    Traceback (most recent call last):
+      ...
+    ValueError: expected parenthesized list: '-bar'
+
+    >>> v = VersionPredicate('foo bar (12.21)')
+    Traceback (most recent call last):
+      ...
+    ValueError: expected parenthesized list: 'bar (12.21)'
+
+    """
+
+    def __init__(self, versionPredicateStr):
+        """Parse a version predicate string.
+        """
+        # Fields:
+        #    name:  package name
+        #    pred:  list of (comparison string, StrictVersion)
+
+        versionPredicateStr = versionPredicateStr.strip()
+        if not versionPredicateStr:
+            raise ValueError("empty package restriction")
+        match = re_validPackage.match(versionPredicateStr)
+        if not match:
+            raise ValueError("bad package name in %r" % versionPredicateStr)
+        self.name, paren = match.groups()
+        paren = paren.strip()
+        if paren:
+            match = re_paren.match(paren)
+            if not match:
+                raise ValueError("expected parenthesized list: %r" % paren)
+            str = match.groups()[0]
+            self.pred = [splitUp(aPred) for aPred in str.split(",")]
+            if not self.pred:
+                raise ValueError("empty parenthesized list in %r"
+                                 % versionPredicateStr)
+        else:
+            self.pred = []
+
+    def __str__(self):
+        if self.pred:
+            seq = [cond + " " + str(ver) for cond, ver in self.pred]
+            return self.name + " (" + ", ".join(seq) + ")"
+        else:
+            return self.name
+
+    def satisfied_by(self, version):
+        """True if version is compatible with all the predicates in self.
+        The parameter version must be acceptable to the StrictVersion
+        constructor.  It may be either a string or StrictVersion.
+        """
+        for cond, ver in self.pred:
+            if not compmap[cond](version, ver):
+                return False
+        return True
+
+
+_provision_rx = None
+
+def split_provision(value):
+    """Return the name and optional version number of a provision.
+
+    The version number, if given, will be returned as a `StrictVersion`
+    instance, otherwise it will be `None`.
+
+    >>> split_provision('mypkg')
+    ('mypkg', None)
+    >>> split_provision(' mypkg( 1.2 ) ')
+    ('mypkg', StrictVersion ('1.2'))
+    """
+    global _provision_rx
+    if _provision_rx is None:
+        _provision_rx = re.compile(
+            "([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$")
+    value = value.strip()
+    m = _provision_rx.match(value)
+    if not m:
+        raise ValueError("illegal provides specification: %r" % value)
+    ver = m.group(2) or None
+    if ver:
+        ver = distutils.version.StrictVersion(ver)
+    return m.group(1), ver
diff --git a/lib/django-1.4/LICENSE b/lib/django-1.4/LICENSE
index 5f4f225..d0c3f28 100644
--- a/lib/django-1.4/LICENSE
+++ b/lib/django-1.4/LICENSE
@@ -4,10 +4,10 @@
 Redistribution and use in source and binary forms, with or without modification,
 are permitted provided that the following conditions are met:
 
-    1. Redistributions of source code must retain the above copyright notice,
+    1. Redistributions of source code must retain the above copyright notice, 
        this list of conditions and the following disclaimer.
-
-    2. Redistributions in binary form must reproduce the above copyright
+    
+    2. Redistributions in binary form must reproduce the above copyright 
        notice, this list of conditions and the following disclaimer in the
        documentation and/or other materials provided with the distribution.
 
diff --git a/lib/django-1.4/PKG-INFO b/lib/django-1.4/PKG-INFO
index 00e1795..3a4e20f 100644
--- a/lib/django-1.4/PKG-INFO
+++ b/lib/django-1.4/PKG-INFO
@@ -1,12 +1,12 @@
-Metadata-Version: 1.0
+Metadata-Version: 1.1
 Name: Django
-Version: 1.4.3
+Version: 1.4.13
 Summary: A high-level Python Web framework that encourages rapid development and clean, pragmatic design.
 Home-page: http://www.djangoproject.com/
 Author: Django Software Foundation
 Author-email: foundation@djangoproject.com
 License: UNKNOWN
-Download-URL: https://www.djangoproject.com/m/releases/1.4/Django-1.4.3.tar.gz
+Download-URL: https://www.djangoproject.com/m/releases/1.4/Django-1.4.13.tar.gz
 Description: UNKNOWN
 Platform: UNKNOWN
 Classifier: Development Status :: 5 - Production/Stable
diff --git a/lib/django-1.4/django/__init__.py b/lib/django-1.4/django/__init__.py
index 2c2cfb0..1c3e27c 100644
--- a/lib/django-1.4/django/__init__.py
+++ b/lib/django-1.4/django/__init__.py
@@ -1,4 +1,4 @@
-VERSION = (1, 4, 3, 'final', 0)
+VERSION = (1, 4, 13, 'final', 0)
 
 def get_version(version=None):
     """Derives a PEP386-compliant version number from VERSION."""
diff --git a/lib/django-1.4/django/conf/global_settings.py b/lib/django-1.4/django/conf/global_settings.py
index bd85c12..6512e4e 100644
--- a/lib/django-1.4/django/conf/global_settings.py
+++ b/lib/django-1.4/django/conf/global_settings.py
@@ -29,6 +29,10 @@
 #   * Receive x-headers
 INTERNAL_IPS = ()
 
+# Hosts/domain names that are valid for this site.
+# "*" matches anything, ".example.com" matches example.com and all subdomains
+ALLOWED_HOSTS = ['*']
+
 # Local time zone for this installation. All choices can be found here:
 # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all
 # systems may support all possibilities). When USE_TZ is True, this is
@@ -512,6 +516,7 @@
     'django.contrib.auth.hashers.BCryptPasswordHasher',
     'django.contrib.auth.hashers.SHA1PasswordHasher',
     'django.contrib.auth.hashers.MD5PasswordHasher',
+    'django.contrib.auth.hashers.UnsaltedSHA1PasswordHasher',
     'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
     'django.contrib.auth.hashers.CryptPasswordHasher',
 )
diff --git a/lib/django-1.4/django/conf/project_template/project_name/settings.py b/lib/django-1.4/django/conf/project_template/project_name/settings.py
index 0eccc4e..d74eccf 100644
--- a/lib/django-1.4/django/conf/project_template/project_name/settings.py
+++ b/lib/django-1.4/django/conf/project_template/project_name/settings.py
@@ -20,6 +20,10 @@
     }
 }
 
+# Hosts/domain names that are valid for this site; required if DEBUG is False
+# See https://docs.djangoproject.com/en/1.4/ref/settings/#allowed-hosts
+ALLOWED_HOSTS = []
+
 # Local time zone for this installation. Choices can be found here:
 # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
 # although not all choices may be available on all operating systems.
diff --git a/lib/django-1.4/django/contrib/admin/options.py b/lib/django-1.4/django/contrib/admin/options.py
index 2071792..78a08cd 100644
--- a/lib/django-1.4/django/contrib/admin/options.py
+++ b/lib/django-1.4/django/contrib/admin/options.py
@@ -1317,15 +1317,21 @@
     def history_view(self, request, object_id, extra_context=None):
         "The 'history' admin view for this model."
         from django.contrib.admin.models import LogEntry
+        # First check if the user can see this history.
         model = self.model
+        obj = get_object_or_404(model, pk=unquote(object_id))
+
+        if not self.has_change_permission(request, obj):
+            raise PermissionDenied
+
+        # Then get the history for this object.
         opts = model._meta
         app_label = opts.app_label
         action_list = LogEntry.objects.filter(
             object_id = object_id,
             content_type__id__exact = ContentType.objects.get_for_model(model).id
         ).select_related().order_by('action_time')
-        # If no history was found, see whether this object even exists.
-        obj = get_object_or_404(model, pk=unquote(object_id))
+
         context = {
             'title': _('Change history: %s') % force_unicode(obj),
             'action_list': action_list,
diff --git a/lib/django-1.4/django/contrib/admin/tests.py b/lib/django-1.4/django/contrib/admin/tests.py
index 9d94127..a66e89f 100644
--- a/lib/django-1.4/django/contrib/admin/tests.py
+++ b/lib/django-1.4/django/contrib/admin/tests.py
@@ -49,6 +49,20 @@
             timeout
         )
 
+    def wait_page_loaded(self):
+        """
+        Block until page has started to load.
+        """
+        from selenium.common.exceptions import TimeoutException
+        try:
+            # Wait for the next page to be loaded
+            self.wait_loaded_tag('body')
+        except TimeoutException:
+            # IE7 occasionnally returns an error "Internet Explorer cannot
+            # display the webpage" and doesn't load the next page. We just
+            # ignore it.
+            pass
+
     def admin_login(self, username, password, login_url='/admin/'):
         """
         Helper function to log into the admin.
@@ -61,8 +75,7 @@
         login_text = _('Log in')
         self.selenium.find_element_by_xpath(
             '//input[@value="%s"]' % login_text).click()
-        # Wait for the next page to be loaded.
-        self.wait_loaded_tag('body')
+        self.wait_page_loaded()
 
     def get_css_value(self, selector, attribute):
         """
diff --git a/lib/django-1.4/django/contrib/auth/admin.py b/lib/django-1.4/django/contrib/auth/admin.py
index f14b3d2..336e90a 100644
--- a/lib/django-1.4/django/contrib/auth/admin.py
+++ b/lib/django-1.4/django/contrib/auth/admin.py
@@ -17,6 +17,8 @@
 from django.views.decorators.debug import sensitive_post_parameters
 
 csrf_protect_m = method_decorator(csrf_protect)
+sensitive_post_parameters_m = method_decorator(sensitive_post_parameters())
+
 
 class GroupAdmin(admin.ModelAdmin):
     search_fields = ('name',)
@@ -83,7 +85,7 @@
              self.admin_site.admin_view(self.user_change_password))
         ) + super(UserAdmin, self).get_urls()
 
-    @sensitive_post_parameters()
+    @sensitive_post_parameters_m
     @csrf_protect_m
     @transaction.commit_on_success
     def add_view(self, request, form_url='', extra_context=None):
@@ -113,7 +115,7 @@
         return super(UserAdmin, self).add_view(request, form_url,
                                                extra_context)
 
-    @sensitive_post_parameters()
+    @sensitive_post_parameters_m
     def user_change_password(self, request, id, form_url=''):
         if not self.has_change_permission(request):
             raise PermissionDenied
@@ -170,4 +172,3 @@
 
 admin.site.register(Group, GroupAdmin)
 admin.site.register(User, UserAdmin)
-
diff --git a/lib/django-1.4/django/contrib/auth/hashers.py b/lib/django-1.4/django/contrib/auth/hashers.py
index 5824685..a9dbcc9 100644
--- a/lib/django-1.4/django/contrib/auth/hashers.py
+++ b/lib/django-1.4/django/contrib/auth/hashers.py
@@ -35,8 +35,14 @@
     password = smart_str(password)
     encoded = smart_str(encoded)
 
-    if len(encoded) == 32 and '$' not in encoded:
+    # Ancient versions of Django created plain MD5 passwords and accepted
+    # MD5 passwords with an empty salt.
+    if ((len(encoded) == 32 and '$' not in encoded) or
+            (len(encoded) == 37 and encoded.startswith('md5$$'))):
         hasher = get_hasher('unsalted_md5')
+    # Ancient versions of Django accepted SHA1 passwords with an empty salt.
+    elif len(encoded) == 46 and encoded.startswith('sha1$$'):
+        hasher = get_hasher('unsalted_sha1')
     else:
         algorithm = encoded.split('$', 1)[0]
         hasher = get_hasher(algorithm)
@@ -329,14 +335,48 @@
         ])
 
 
+class UnsaltedSHA1PasswordHasher(BasePasswordHasher):
+    """
+    Very insecure algorithm that you should *never* use; stores SHA1 hashes
+    with an empty salt.
+
+    This class is implemented because Django used to accept such password
+    hashes. Some older Django installs still have these values lingering
+    around so we need to handle and upgrade them properly.
+    """
+    algorithm = "unsalted_sha1"
+
+    def salt(self):
+        return ''
+
+    def encode(self, password, salt):
+        assert salt == ''
+        hash = hashlib.sha1(password).hexdigest()
+        return 'sha1$$%s' % hash
+
+    def verify(self, password, encoded):
+        encoded_2 = self.encode(password, '')
+        return constant_time_compare(encoded, encoded_2)
+
+    def safe_summary(self, encoded):
+        assert encoded.startswith('sha1$$')
+        hash = encoded[6:]
+        return SortedDict([
+            (_('algorithm'), self.algorithm),
+            (_('hash'), mask_hash(hash)),
+        ])
+
+
 class UnsaltedMD5PasswordHasher(BasePasswordHasher):
     """
-    I am an incredibly insecure algorithm you should *never* use;
-    stores unsalted MD5 hashes without the algorithm prefix.
+    Incredibly insecure algorithm that you should *never* use; stores unsalted
+    MD5 hashes without the algorithm prefix, also accepts MD5 hashes with an
+    empty salt.
 
-    This class is implemented because Django used to store passwords
-    this way. Some older Django installs still have these values
-    lingering around so we need to handle and upgrade them properly.
+    This class is implemented because Django used to store passwords this way
+    and to accept such password hashes. Some older Django installs still have
+    these values lingering around so we need to handle and upgrade them
+    properly.
     """
     algorithm = "unsalted_md5"
 
@@ -344,9 +384,12 @@
         return ''
 
     def encode(self, password, salt):
+        assert salt == ''
         return hashlib.md5(password).hexdigest()
 
     def verify(self, password, encoded):
+        if len(encoded) == 37 and encoded.startswith('md5$$'):
+            encoded = encoded[5:]
         encoded_2 = self.encode(password, '')
         return constant_time_compare(encoded, encoded_2)
 
diff --git a/lib/django-1.4/django/contrib/auth/tests/hashers.py b/lib/django-1.4/django/contrib/auth/tests/hashers.py
index 865085a..cf383e3 100644
--- a/lib/django-1.4/django/contrib/auth/tests/hashers.py
+++ b/lib/django-1.4/django/contrib/auth/tests/hashers.py
@@ -1,5 +1,5 @@
 from django.conf.global_settings import PASSWORD_HASHERS as default_hashers
-from django.contrib.auth.hashers import (is_password_usable, 
+from django.contrib.auth.hashers import (is_password_usable,
     check_password, make_password, PBKDF2PasswordHasher, load_hashers,
     PBKDF2SHA1PasswordHasher, get_hasher, UNUSABLE_PASSWORD)
 from django.utils import unittest
@@ -14,6 +14,10 @@
 
 try:
     import bcrypt
+    # Django 1.4 works only with py-bcrypt, not with bcrypt. py-bcrypt has
+    # '_bcrypt' attribute, bcrypt doesn't.
+    if not hasattr(bcrypt, '_bcrypt'):
+        bcrypt = None
 except ImportError:
     bcrypt = None
 
@@ -31,7 +35,7 @@
 
     def test_pkbdf2(self):
         encoded = make_password('letmein', 'seasalt', 'pbkdf2_sha256')
-        self.assertEqual(encoded, 
+        self.assertEqual(encoded,
 'pbkdf2_sha256$10000$seasalt$FQCNpiZpTb0zub+HBsH6TOwyRxJ19FwvjbweatNmK/Y=')
         self.assertTrue(is_password_usable(encoded))
         self.assertTrue(check_password(u'letmein', encoded))
@@ -39,7 +43,7 @@
 
     def test_sha1(self):
         encoded = make_password('letmein', 'seasalt', 'sha1')
-        self.assertEqual(encoded, 
+        self.assertEqual(encoded,
 'sha1$seasalt$fec3530984afba6bade3347b7140d1a7da7da8c7')
         self.assertTrue(is_password_usable(encoded))
         self.assertTrue(check_password(u'letmein', encoded))
@@ -47,18 +51,33 @@
 
     def test_md5(self):
         encoded = make_password('letmein', 'seasalt', 'md5')
-        self.assertEqual(encoded, 
+        self.assertEqual(encoded,
                          'md5$seasalt$f5531bef9f3687d0ccf0f617f0e25573')
         self.assertTrue(is_password_usable(encoded))
         self.assertTrue(check_password(u'letmein', encoded))
         self.assertFalse(check_password('letmeinz', encoded))
 
     def test_unsalted_md5(self):
-        encoded = make_password('letmein', 'seasalt', 'unsalted_md5')
+        encoded = make_password('letmein', '', 'unsalted_md5')
         self.assertEqual(encoded, '0d107d09f5bbe40cade3de5c71e9e9b7')
         self.assertTrue(is_password_usable(encoded))
         self.assertTrue(check_password(u'letmein', encoded))
         self.assertFalse(check_password('letmeinz', encoded))
+        # Alternate unsalted syntax
+        alt_encoded = "md5$$%s" % encoded
+        self.assertTrue(is_password_usable(alt_encoded))
+        self.assertTrue(check_password(u'letmein', alt_encoded))
+        self.assertFalse(check_password('letmeinz', alt_encoded))
+
+    def test_unsalted_sha1(self):
+        encoded = make_password('letmein', '', 'unsalted_sha1')
+        self.assertEqual(encoded, 'sha1$$b7a875fc1ea228b9061041b7cec4bd3c52ab3ce3')
+        self.assertTrue(is_password_usable(encoded))
+        self.assertTrue(check_password('letmein', encoded))
+        self.assertFalse(check_password('letmeinz', encoded))
+        # Raw SHA1 isn't acceptable
+        alt_encoded = encoded[6:]
+        self.assertRaises(ValueError, check_password, 'letmein', alt_encoded)
 
     @skipUnless(crypt, "no crypt module to generate password.")
     def test_crypt(self):
@@ -93,14 +112,14 @@
     def test_low_level_pkbdf2(self):
         hasher = PBKDF2PasswordHasher()
         encoded = hasher.encode('letmein', 'seasalt')
-        self.assertEqual(encoded, 
+        self.assertEqual(encoded,
 'pbkdf2_sha256$10000$seasalt$FQCNpiZpTb0zub+HBsH6TOwyRxJ19FwvjbweatNmK/Y=')
         self.assertTrue(hasher.verify('letmein', encoded))
 
     def test_low_level_pbkdf2_sha1(self):
         hasher = PBKDF2SHA1PasswordHasher()
         encoded = hasher.encode('letmein', 'seasalt')
-        self.assertEqual(encoded, 
+        self.assertEqual(encoded,
 'pbkdf2_sha1$10000$seasalt$91JiNKgwADC8j2j86Ije/cc4vfQ=')
         self.assertTrue(hasher.verify('letmein', encoded))
 
diff --git a/lib/django-1.4/django/contrib/auth/tests/views.py b/lib/django-1.4/django/contrib/auth/tests/views.py
index d295bb8..2b72cd4 100644
--- a/lib/django-1.4/django/contrib/auth/tests/views.py
+++ b/lib/django-1.4/django/contrib/auth/tests/views.py
@@ -107,6 +107,7 @@
         self.assertEqual(len(mail.outbox), 1)
         self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)
 
+    @override_settings(ALLOWED_HOSTS=['adminsite.com'])
     def test_admin_reset(self):
         "If the reset view is marked as being for admin, the HTTP_HOST header is used for a domain override."
         response = self.client.post('/admin_password_reset/',
@@ -306,9 +307,12 @@
 
         # Those URLs should not pass the security check
         for bad_url in ('http://example.com',
+                        'http:///example.com',
                         'https://example.com',
                         'ftp://exampel.com',
-                        '//example.com'):
+                        '///example.com',
+                        '//example.com',
+                        'javascript:alert("XSS")'):
 
             nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                 'url': login_url,
@@ -328,7 +332,8 @@
                          '/view/?param=https://example.com',
                          '/view?param=ftp://exampel.com',
                          'view/?param=//example.com',
-                         'https:///',
+                         'https://testserver/',
+                         'HTTPS://testserver/',
                          '//testserver/',
                          '/url%20with%20spaces/'):  # see ticket #12534
             safe_url = '%(url)s?%(next)s=%(good_url)s' % {
@@ -464,9 +469,12 @@
 
         # Those URLs should not pass the security check
         for bad_url in ('http://example.com',
+                        'http:///example.com',
                         'https://example.com',
                         'ftp://exampel.com',
-                        '//example.com'):
+                        '///example.com',
+                        '//example.com',
+                        'javascript:alert("XSS")'):
             nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                 'url': logout_url,
                 'next': REDIRECT_FIELD_NAME,
@@ -484,7 +492,8 @@
                          '/view/?param=https://example.com',
                          '/view?param=ftp://exampel.com',
                          'view/?param=//example.com',
-                         'https:///',
+                         'https://testserver/',
+                         'HTTPS://testserver/',
                          '//testserver/',
                          '/url%20with%20spaces/'):  # see ticket #12534
             safe_url = '%(url)s?%(next)s=%(good_url)s' % {
diff --git a/lib/django-1.4/django/contrib/contenttypes/tests.py b/lib/django-1.4/django/contrib/contenttypes/tests.py
index 3b7906c..66226a7 100644
--- a/lib/django-1.4/django/contrib/contenttypes/tests.py
+++ b/lib/django-1.4/django/contrib/contenttypes/tests.py
@@ -9,6 +9,7 @@
 from django.http import HttpRequest, Http404
 from django.test import TestCase
 from django.utils.encoding import smart_str
+from django.test.utils import override_settings
 
 
 class FooWithoutUrl(models.Model):
@@ -114,6 +115,7 @@
             FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
         })
 
+    @override_settings(ALLOWED_HOSTS=['example.com'])
     def test_shortcut_view(self):
         """
         Check that the shortcut view (used for the admin "view on site"
diff --git a/lib/django-1.4/django/contrib/gis/geos/tests/test_geos.py b/lib/django-1.4/django/contrib/gis/geos/tests/test_geos.py
index a8372b4..ddd5d58 100644
--- a/lib/django-1.4/django/contrib/gis/geos/tests/test_geos.py
+++ b/lib/django-1.4/django/contrib/gis/geos/tests/test_geos.py
@@ -662,7 +662,7 @@
             for i in range(len(mp)):
                 # Creating a random point.
                 pnt = mp[i]
-                new = Point(random.randint(1, 100), random.randint(1, 100))
+                new = Point(random.randint(21, 100), random.randint(21, 100))
                 # Testing the assignment
                 mp[i] = new
                 s = str(new) # what was used for the assignment is still accessible
diff --git a/lib/django-1.4/django/contrib/gis/tests/inspectapp/tests.py b/lib/django-1.4/django/contrib/gis/tests/inspectapp/tests.py
index a3d1978..216aea7 100644
--- a/lib/django-1.4/django/contrib/gis/tests/inspectapp/tests.py
+++ b/lib/django-1.4/django/contrib/gis/tests/inspectapp/tests.py
@@ -68,23 +68,27 @@
                                layer_key=AllOGRFields._meta.db_table,
                                decimal=['f_decimal'])
 
-        expected = [
-            '# This is an auto-generated Django model module created by ogrinspect.',
-            'from django.contrib.gis.db import models',
-            '',
-            'class Measurement(models.Model):',
-            '    f_decimal = models.DecimalField(max_digits=0, decimal_places=0)',
-            '    f_int = models.IntegerField()',
-            '    f_datetime = models.DateTimeField()',
-            '    f_time = models.TimeField()',
-            '    f_float = models.FloatField()',
-            '    f_char = models.CharField(max_length=10)',
-            '    f_date = models.DateField()',
-            '    geom = models.PolygonField()',
-            '    objects = models.GeoManager()',
-        ]
+        self.assertTrue(model_def.startswith(
+            '# This is an auto-generated Django model module created by ogrinspect.\n'
+            'from django.contrib.gis.db import models\n'
+            '\n'
+            'class Measurement(models.Model):\n'
+        ))
 
-        self.assertEqual(model_def, '\n'.join(expected))
+        # The ordering of model fields might vary depending on several factors (version of GDAL, etc.)
+        self.assertIn('    f_decimal = models.DecimalField(max_digits=0, decimal_places=0)', model_def)
+        self.assertIn('    f_int = models.IntegerField()', model_def)
+        self.assertIn('    f_datetime = models.DateTimeField()', model_def)
+        self.assertIn('    f_time = models.TimeField()', model_def)
+        self.assertIn('    f_float = models.FloatField()', model_def)
+        self.assertIn('    f_char = models.CharField(max_length=10)', model_def)
+        self.assertIn('    f_date = models.DateField()', model_def)
+
+        self.assertTrue(model_def.endswith(
+            '    geom = models.PolygonField()\n'
+            '    objects = models.GeoManager()'
+        ))
+
 
 def get_ogr_db_string():
     # Construct the DB string that GDAL will use to inspect the database.
diff --git a/lib/django-1.4/django/contrib/sites/tests.py b/lib/django-1.4/django/contrib/sites/tests.py
index 828badb..1fd52e6 100644
--- a/lib/django-1.4/django/contrib/sites/tests.py
+++ b/lib/django-1.4/django/contrib/sites/tests.py
@@ -3,6 +3,7 @@
 from django.core.exceptions import ObjectDoesNotExist
 from django.http import HttpRequest
 from django.test import TestCase
+from django.test.utils import override_settings
 
 
 class SitesFrameworkTests(TestCase):
@@ -39,6 +40,7 @@
         site = Site.objects.get_current()
         self.assertEqual(u"Example site", site.name)
 
+    @override_settings(ALLOWED_HOSTS=['example.com'])
     def test_get_current_site(self):
         # Test that the correct Site object is returned
         request = HttpRequest()
diff --git a/lib/django-1.4/django/core/handlers/base.py b/lib/django-1.4/django/core/handlers/base.py
index a0918bf..99f81a6 100644
--- a/lib/django-1.4/django/core/handlers/base.py
+++ b/lib/django-1.4/django/core/handlers/base.py
@@ -14,8 +14,6 @@
     response_fixes = [
         http.fix_location_header,
         http.conditional_content_removal,
-        http.fix_IE_for_attach,
-        http.fix_IE_for_vary,
     ]
 
     def __init__(self):
diff --git a/lib/django-1.4/django/core/serializers/xml_serializer.py b/lib/django-1.4/django/core/serializers/xml_serializer.py
index a5edeac..6360ada 100644
--- a/lib/django-1.4/django/core/serializers/xml_serializer.py
+++ b/lib/django-1.4/django/core/serializers/xml_serializer.py
@@ -8,6 +8,8 @@
 from django.utils.xmlutils import SimplerXMLGenerator
 from django.utils.encoding import smart_unicode
 from xml.dom import pulldom
+from xml.sax import handler
+from xml.sax.expatreader import ExpatParser as _ExpatParser
 
 class Serializer(base.Serializer):
     """
@@ -149,9 +151,13 @@
 
     def __init__(self, stream_or_string, **options):
         super(Deserializer, self).__init__(stream_or_string, **options)
-        self.event_stream = pulldom.parse(self.stream)
+        self.event_stream = pulldom.parse(self.stream, self._make_parser())
         self.db = options.pop('using', DEFAULT_DB_ALIAS)
 
+    def _make_parser(self):
+        """Create a hardened XML parser (no custom/external entities)."""
+        return DefusedExpatParser()
+
     def next(self):
         for event, node in self.event_stream:
             if event == "START_ELEMENT" and node.nodeName == "object":
@@ -290,3 +296,90 @@
         else:
            pass
     return u"".join(inner_text)
+
+
+# Below code based on Christian Heimes' defusedxml
+
+
+class DefusedExpatParser(_ExpatParser):
+    """
+    An expat parser hardened against XML bomb attacks.
+
+    Forbids DTDs, external entity references
+
+    """
+    def __init__(self, *args, **kwargs):
+        _ExpatParser.__init__(self, *args, **kwargs)
+        self.setFeature(handler.feature_external_ges, False)
+        self.setFeature(handler.feature_external_pes, False)
+
+    def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
+        raise DTDForbidden(name, sysid, pubid)
+
+    def entity_decl(self, name, is_parameter_entity, value, base,
+                    sysid, pubid, notation_name):
+        raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
+
+    def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
+        # expat 1.2
+        raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)
+
+    def external_entity_ref_handler(self, context, base, sysid, pubid):
+        raise ExternalReferenceForbidden(context, base, sysid, pubid)
+
+    def reset(self):
+        _ExpatParser.reset(self)
+        parser = self._parser
+        parser.StartDoctypeDeclHandler = self.start_doctype_decl
+        parser.EntityDeclHandler = self.entity_decl
+        parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
+        parser.ExternalEntityRefHandler = self.external_entity_ref_handler
+
+
+class DefusedXmlException(ValueError):
+    """Base exception."""
+    def __repr__(self):
+        return str(self)
+
+
+class DTDForbidden(DefusedXmlException):
+    """Document type definition is forbidden."""
+    def __init__(self, name, sysid, pubid):
+        super(DTDForbidden, self).__init__()
+        self.name = name
+        self.sysid = sysid
+        self.pubid = pubid
+
+    def __str__(self):
+        tpl = "DTDForbidden(name='{}', system_id={!r}, public_id={!r})"
+        return tpl.format(self.name, self.sysid, self.pubid)
+
+
+class EntitiesForbidden(DefusedXmlException):
+    """Entity definition is forbidden."""
+    def __init__(self, name, value, base, sysid, pubid, notation_name):
+        super(EntitiesForbidden, self).__init__()
+        self.name = name
+        self.value = value
+        self.base = base
+        self.sysid = sysid
+        self.pubid = pubid
+        self.notation_name = notation_name
+
+    def __str__(self):
+        tpl = "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})"
+        return tpl.format(self.name, self.sysid, self.pubid)
+
+
+class ExternalReferenceForbidden(DefusedXmlException):
+    """Resolving an external reference is forbidden."""
+    def __init__(self, context, base, sysid, pubid):
+        super(ExternalReferenceForbidden, self).__init__()
+        self.context = context
+        self.base = base
+        self.sysid = sysid
+        self.pubid = pubid
+
+    def __str__(self):
+        tpl = "ExternalReferenceForbidden(system_id='{}', public_id={})"
+        return tpl.format(self.sysid, self.pubid)
diff --git a/lib/django-1.4/django/core/urlresolvers.py b/lib/django-1.4/django/core/urlresolvers.py
index 1497d43..6d75cb7 100644
--- a/lib/django-1.4/django/core/urlresolvers.py
+++ b/lib/django-1.4/django/core/urlresolvers.py
@@ -7,6 +7,7 @@
     (view_function, function_args, function_kwargs)
 """
 
+import functools
 import re
 from threading import local
 
@@ -230,6 +231,10 @@
         self._reverse_dict = {}
         self._namespace_dict = {}
         self._app_dict = {}
+        # set of dotted paths to all functions and classes that are used in
+        # urlpatterns
+        self._callback_strs = set()
+        self._populated = False
 
     def __repr__(self):
         return smart_str(u'<%s %s (%s:%s) %s>' % (self.__class__.__name__, self.urlconf_name, self.app_name, self.namespace, self.regex.pattern))
@@ -240,6 +245,18 @@
         apps = {}
         language_code = get_language()
         for pattern in reversed(self.url_patterns):
+            if hasattr(pattern, '_callback_str'):
+                self._callback_strs.add(pattern._callback_str)
+            elif hasattr(pattern, '_callback'):
+                callback = pattern._callback
+                if isinstance(callback, functools.partial):
+                    callback = callback.func
+
+                if not hasattr(callback, '__name__'):
+                    lookup_str = callback.__module__ + "." + callback.__class__.__name__
+                else:
+                    lookup_str = callback.__module__ + "." + callback.__name__
+                self._callback_strs.add(lookup_str)
             p_pattern = pattern.regex.pattern
             if p_pattern.startswith('^'):
                 p_pattern = p_pattern[1:]
@@ -260,6 +277,7 @@
                         namespaces[namespace] = (p_pattern + prefix, sub_pattern)
                     for app_name, namespace_list in pattern.app_dict.items():
                         apps.setdefault(app_name, []).extend(namespace_list)
+                    self._callback_strs.update(pattern._callback_strs)
             else:
                 bits = normalize(p_pattern)
                 lookups.appendlist(pattern.callback, (bits, p_pattern, pattern.default_args))
@@ -268,6 +286,7 @@
         self._reverse_dict[language_code] = lookups
         self._namespace_dict[language_code] = namespaces
         self._app_dict[language_code] = apps
+        self._populated = True
 
     @property
     def reverse_dict(self):
@@ -356,8 +375,13 @@
     def _reverse_with_prefix(self, lookup_view, _prefix, *args, **kwargs):
         if args and kwargs:
             raise ValueError("Don't mix *args and **kwargs in call to reverse()!")
+
+        if not self._populated:
+            self._populate()
+
         try:
-            lookup_view = get_callable(lookup_view, True)
+            if lookup_view in self._callback_strs:
+                lookup_view = get_callable(lookup_view, True)
         except (ImportError, AttributeError), e:
             raise NoReverseMatch("Error importing '%s': %s." % (lookup_view, e))
         possibilities = self.reverse_dict.getlist(lookup_view)
diff --git a/lib/django-1.4/django/db/__init__.py b/lib/django-1.4/django/db/__init__.py
index 26c7add..605c3a2 100644
--- a/lib/django-1.4/django/db/__init__.py
+++ b/lib/django-1.4/django/db/__init__.py
@@ -42,8 +42,14 @@
 # Register an event that closes the database connection
 # when a Django request is finished.
 def close_connection(**kwargs):
-    for conn in connections.all():
-        conn.close()
+    # Avoid circular imports
+    from django.db import transaction
+    for conn in connections:
+        # If an error happens here the connection will be left in broken
+        # state. Once a good db connection is again available, the
+        # connection state will be cleaned up.
+        transaction.abort(conn)
+        connections[conn].close()
 signals.request_finished.connect(close_connection)
 
 # Register an event that resets connection.queries
diff --git a/lib/django-1.4/django/db/backends/__init__.py b/lib/django-1.4/django/db/backends/__init__.py
index 14e9c4a..9c7936e 100644
--- a/lib/django-1.4/django/db/backends/__init__.py
+++ b/lib/django-1.4/django/db/backends/__init__.py
@@ -83,6 +83,17 @@
             return
         self.cursor().execute(self.ops.savepoint_commit_sql(sid))
 
+    def abort(self):
+        """
+        Roll back any ongoing transaction and clean the transaction state
+        stack.
+        """
+        if self._dirty:
+            self._rollback()
+            self._dirty = False
+        while self.transaction_state:
+            self.leave_transaction_management()
+
     def enter_transaction_management(self, managed=True):
         """
         Enters transaction management for a running thread. It must be balanced with
diff --git a/lib/django-1.4/django/db/backends/mysql/base.py b/lib/django-1.4/django/db/backends/mysql/base.py
index 91dc4d2..f9c1e2e 100644
--- a/lib/django-1.4/django/db/backends/mysql/base.py
+++ b/lib/django-1.4/django/db/backends/mysql/base.py
@@ -177,7 +177,7 @@
             # will tell you the default table type of the created
             # table. Since all Django's test tables will have the same
             # table type, that's enough to evaluate the feature.
-            cursor.execute("SHOW TABLE STATUS WHERE Name='INTROSPECT_TEST'")
+            cursor.execute("SHOW TABLE STATUS LIKE 'INTROSPECT_TEST'")
             result = cursor.fetchone()
             cursor.execute('DROP TABLE INTROSPECT_TEST')
             self._storage_engine = result[1]
diff --git a/lib/django-1.4/django/db/backends/oracle/creation.py b/lib/django-1.4/django/db/backends/oracle/creation.py
index 9e6133f..9d4f9b1 100644
--- a/lib/django-1.4/django/db/backends/oracle/creation.py
+++ b/lib/django-1.4/django/db/backends/oracle/creation.py
@@ -167,6 +167,7 @@
                IDENTIFIED BY %(password)s
                DEFAULT TABLESPACE %(tblspace)s
                TEMPORARY TABLESPACE %(tblspace_temp)s
+               QUOTA UNLIMITED ON %(tblspace)s
             """,
             """GRANT CONNECT, RESOURCE TO %(user)s""",
         ]
diff --git a/lib/django-1.4/django/db/backends/oracle/introspection.py b/lib/django-1.4/django/db/backends/oracle/introspection.py
index b8a8b2e..7d477f6 100644
--- a/lib/django-1.4/django/db/backends/oracle/introspection.py
+++ b/lib/django-1.4/django/db/backends/oracle/introspection.py
@@ -72,14 +72,14 @@
     FROM   user_constraints, USER_CONS_COLUMNS ca, USER_CONS_COLUMNS cb,
            user_tab_cols ta, user_tab_cols tb
     WHERE  user_constraints.table_name = %s AND
-           ta.table_name = %s AND
+           ta.table_name = user_constraints.table_name AND
            ta.column_name = ca.column_name AND
-           ca.table_name = %s AND
+           ca.table_name = ta.table_name AND
            user_constraints.constraint_name = ca.constraint_name AND
            user_constraints.r_constraint_name = cb.constraint_name AND
            cb.table_name = tb.table_name AND
            cb.column_name = tb.column_name AND
-           ca.position = cb.position""", [table_name, table_name, table_name])
+           ca.position = cb.position""", [table_name])
 
         relations = {}
         for row in cursor.fetchall():
@@ -87,36 +87,31 @@
         return relations
 
     def get_indexes(self, cursor, table_name):
+        sql = """
+    SELECT LOWER(uic1.column_name) AS column_name,
+           CASE user_constraints.constraint_type
+               WHEN 'P' THEN 1 ELSE 0
+           END AS is_primary_key,
+           CASE user_indexes.uniqueness
+               WHEN 'UNIQUE' THEN 1 ELSE 0
+           END AS is_unique
+    FROM   user_constraints, user_indexes, user_ind_columns uic1
+    WHERE  user_constraints.constraint_type (+) = 'P'
+      AND  user_constraints.index_name (+) = uic1.index_name
+      AND  user_indexes.uniqueness (+) = 'UNIQUE'
+      AND  user_indexes.index_name (+) = uic1.index_name
+      AND  uic1.table_name = UPPER(%s)
+      AND  uic1.column_position = 1
+      AND  NOT EXISTS (
+              SELECT 1
+              FROM   user_ind_columns uic2
+              WHERE  uic2.index_name = uic1.index_name
+                AND  uic2.column_position = 2
+           )
         """
-        Returns a dictionary of fieldname -> infodict for the given table,
-        where each infodict is in the format:
-            {'primary_key': boolean representing whether it's the primary key,
-             'unique': boolean representing whether it's a unique index}
-        """
-        # This query retrieves each index on the given table, including the
-        # first associated field name
-        # "We were in the nick of time; you were in great peril!"
-        sql = """\
-SELECT LOWER(all_tab_cols.column_name) AS column_name,
-       CASE user_constraints.constraint_type
-           WHEN 'P' THEN 1 ELSE 0
-       END AS is_primary_key,
-       CASE user_indexes.uniqueness
-           WHEN 'UNIQUE' THEN 1 ELSE 0
-       END AS is_unique
-FROM   all_tab_cols, user_cons_columns, user_constraints, user_ind_columns, user_indexes
-WHERE  all_tab_cols.column_name = user_cons_columns.column_name (+)
-  AND  all_tab_cols.table_name = user_cons_columns.table_name (+)
-  AND  user_cons_columns.constraint_name = user_constraints.constraint_name (+)
-  AND  user_constraints.constraint_type (+) = 'P'
-  AND  user_ind_columns.column_name (+) = all_tab_cols.column_name
-  AND  user_ind_columns.table_name (+) = all_tab_cols.table_name
-  AND  user_indexes.uniqueness (+) = 'UNIQUE'
-  AND  user_indexes.index_name (+) = user_ind_columns.index_name
-  AND  all_tab_cols.table_name = UPPER(%s)
-"""
         cursor.execute(sql, [table_name])
         indexes = {}
         for row in cursor.fetchall():
-            indexes[row[0]] = {'primary_key': row[1], 'unique': row[2]}
+            indexes[row[0]] = {'primary_key': bool(row[1]),
+                               'unique': bool(row[2])}
         return indexes
diff --git a/lib/django-1.4/django/db/models/fields/__init__.py b/lib/django-1.4/django/db/models/fields/__init__.py
index 527a3c0..690a671 100644
--- a/lib/django-1.4/django/db/models/fields/__init__.py
+++ b/lib/django-1.4/django/db/models/fields/__init__.py
@@ -911,6 +911,12 @@
         kwargs['max_length'] = kwargs.get('max_length', 100)
         Field.__init__(self, verbose_name, name, **kwargs)
 
+    def get_prep_value(self, value):
+        value = super(FilePathField, self).get_prep_value(value)
+        if value is None:
+            return None
+        return smart_unicode(value)
+
     def formfield(self, **kwargs):
         defaults = {
             'path': self.path,
@@ -1010,6 +1016,12 @@
         kwargs['max_length'] = 15
         Field.__init__(self, *args, **kwargs)
 
+    def get_prep_value(self, value):
+        value = super(IPAddressField, self).get_prep_value(value)
+        if value is None:
+            return None
+        return smart_unicode(value)
+
     def get_internal_type(self):
         return "IPAddressField"
 
@@ -1047,12 +1059,14 @@
         return value or None
 
     def get_prep_value(self, value):
+        if value is None:
+            return value
         if value and ':' in value:
             try:
                 return clean_ipv6_address(value, self.unpack_ipv4)
             except exceptions.ValidationError:
                 pass
-        return value
+        return smart_unicode(value)
 
     def formfield(self, **kwargs):
         defaults = {'form_class': forms.GenericIPAddressField}
diff --git a/lib/django-1.4/django/db/models/fields/related.py b/lib/django-1.4/django/db/models/fields/related.py
index 28e8e06..e5f12fe 100644
--- a/lib/django-1.4/django/db/models/fields/related.py
+++ b/lib/django-1.4/django/db/models/fields/related.py
@@ -544,12 +544,14 @@
                                  "a many-to-many relationship can be used." %
                                  instance.__class__.__name__)
 
-
         def _get_fk_val(self, obj, field_name):
             """
             Returns the correct value for this relationship's foreign key. This
             might be something else than pk value when to_field is used.
             """
+            if not self.through:
+                # Make custom m2m fields with no through model defined usable.
+                return obj.pk
             fk = self.through._meta.get_field(field_name)
             if fk.rel.field_name and fk.rel.field_name != fk.rel.to._meta.pk.attname:
                 attname = fk.rel.get_related_field().get_attname()
diff --git a/lib/django-1.4/django/db/transaction.py b/lib/django-1.4/django/db/transaction.py
index 4ecd2d1..48166d8 100644
--- a/lib/django-1.4/django/db/transaction.py
+++ b/lib/django-1.4/django/db/transaction.py
@@ -25,6 +25,21 @@
     """
     pass
 
+def abort(using=None):
+    """
+    Roll back any ongoing transactions and clean the transaction management
+    state of the connection.
+
+    This method is to be used only in cases where using balanced
+    leave_transaction_management() calls isn't possible. For example after a
+    request has finished, the transaction state isn't known, yet the connection
+    must be cleaned up for the next request.
+    """
+    if using is None:
+        using = DEFAULT_DB_ALIAS
+    connection = connections[using]
+    connection.abort()
+
 def enter_transaction_management(managed=True, using=None):
     """
     Enters transaction management for a running thread. It must be balanced with
diff --git a/lib/django-1.4/django/forms/formsets.py b/lib/django-1.4/django/forms/formsets.py
index dcd2f01..7feeeb1 100644
--- a/lib/django-1.4/django/forms/formsets.py
+++ b/lib/django-1.4/django/forms/formsets.py
@@ -19,6 +19,9 @@
 ORDERING_FIELD_NAME = 'ORDER'
 DELETION_FIELD_NAME = 'DELETE'
 
+# default maximum number of forms in a formset, to prevent memory exhaustion
+DEFAULT_MAX_NUM = 1000
+
 class ManagementForm(Form):
     """
     ``ManagementForm`` is used to keep track of how many form instances
@@ -111,7 +114,7 @@
     def _construct_forms(self):
         # instantiate all the forms and put them in self.forms
         self.forms = []
-        for i in xrange(self.total_form_count()):
+        for i in xrange(min(self.total_form_count(), self.absolute_max)):
             self.forms.append(self._construct_form(i))
 
     def _construct_form(self, i, **kwargs):
@@ -360,9 +363,14 @@
 def formset_factory(form, formset=BaseFormSet, extra=1, can_order=False,
                     can_delete=False, max_num=None):
     """Return a FormSet for the given form class."""
+    if max_num is None:
+        max_num = DEFAULT_MAX_NUM
+    # hard limit on forms instantiated, to prevent memory-exhaustion attacks
+    # limit defaults to DEFAULT_MAX_NUM, but developer can increase it via max_num
+    absolute_max = max(DEFAULT_MAX_NUM, max_num)
     attrs = {'form': form, 'extra': extra,
              'can_order': can_order, 'can_delete': can_delete,
-             'max_num': max_num}
+             'max_num': max_num, 'absolute_max': absolute_max}
     return type(form.__name__ + 'FormSet', (formset,), attrs)
 
 def all_valid(formsets):
diff --git a/lib/django-1.4/django/http/__init__.py b/lib/django-1.4/django/http/__init__.py
index da993eb..4f5fbe6 100644
--- a/lib/django-1.4/django/http/__init__.py
+++ b/lib/django-1.4/django/http/__init__.py
@@ -215,11 +215,12 @@
             if server_port != (self.is_secure() and '443' or '80'):
                 host = '%s:%s' % (host, server_port)
 
-        # Disallow potentially poisoned hostnames.
-        if not host_validation_re.match(host.lower()):
-            raise SuspiciousOperation('Invalid HTTP_HOST header: %s' % host)
-
-        return host
+        allowed_hosts = ['*'] if settings.DEBUG else settings.ALLOWED_HOSTS
+        if validate_host(host, allowed_hosts):
+            return host
+        else:
+            raise SuspiciousOperation(
+                "Invalid HTTP_HOST header (you may need to set ALLOWED_HOSTS): %s" % host)
 
     def get_full_path(self):
         # RFC 3986 requires query string arguments to be in the ASCII range.
@@ -799,3 +800,43 @@
     else:
         return s
 
+def validate_host(host, allowed_hosts):
+    """
+    Validate the given host header value for this site.
+
+    Check that the host looks valid and matches a host or host pattern in the
+    given list of ``allowed_hosts``. Any pattern beginning with a period
+    matches a domain and all its subdomains (e.g. ``.example.com`` matches
+    ``example.com`` and any subdomain), ``*`` matches anything, and anything
+    else must match exactly.
+
+    Return ``True`` for a valid host, ``False`` otherwise.
+
+    """
+    # All validation is case-insensitive
+    host = host.lower()
+
+    # Basic sanity check
+    if not host_validation_re.match(host):
+        return False
+
+    # Validate only the domain part.
+    if host[-1] == ']':
+        # It's an IPv6 address without a port.
+        domain = host
+    else:
+        domain = host.rsplit(':', 1)[0]
+
+    for pattern in allowed_hosts:
+        pattern = pattern.lower()
+        match = (
+            pattern == '*' or
+            pattern.startswith('.') and (
+                domain.endswith(pattern) or domain == pattern[1:]
+                ) or
+            pattern == domain
+            )
+        if match:
+            return True
+
+    return False
diff --git a/lib/django-1.4/django/http/utils.py b/lib/django-1.4/django/http/utils.py
index 0180864..f98ca93 100644
--- a/lib/django-1.4/django/http/utils.py
+++ b/lib/django-1.4/django/http/utils.py
@@ -31,57 +31,3 @@
     if request.method == 'HEAD':
         response.content = ''
     return response
-
-def fix_IE_for_attach(request, response):
-    """
-    This function will prevent Django from serving a Content-Disposition header
-    while expecting the browser to cache it (only when the browser is IE). This
-    leads to IE not allowing the client to download.
-    """
-    useragent = request.META.get('HTTP_USER_AGENT', '').upper()
-    if 'MSIE' not in useragent and 'CHROMEFRAME' not in useragent:
-        return response
-
-    offending_headers = ('no-cache', 'no-store')
-    if response.has_header('Content-Disposition'):
-        try:
-            del response['Pragma']
-        except KeyError:
-            pass
-        if response.has_header('Cache-Control'):
-            cache_control_values = [value.strip() for value in
-                    response['Cache-Control'].split(',')
-                    if value.strip().lower() not in offending_headers]
-
-            if not len(cache_control_values):
-                del response['Cache-Control']
-            else:
-                response['Cache-Control'] = ', '.join(cache_control_values)
-
-    return response
-
-def fix_IE_for_vary(request, response):
-    """
-    This function will fix the bug reported at
-    http://support.microsoft.com/kb/824847/en-us?spid=8722&sid=global
-    by clearing the Vary header whenever the mime-type is not safe
-    enough for Internet Explorer to handle.  Poor thing.
-    """
-    useragent = request.META.get('HTTP_USER_AGENT', '').upper()
-    if 'MSIE' not in useragent and 'CHROMEFRAME' not in useragent:
-        return response
-
-    # These mime-types that are decreed "Vary-safe" for IE:
-    safe_mime_types = ('text/html', 'text/plain', 'text/sgml')
-
-    # The first part of the Content-Type field will be the MIME type,
-    # everything after ';', such as character-set, can be ignored.
-    mime_type = response.get('Content-Type', '').partition(';')[0]
-    if mime_type not in safe_mime_types:
-        try:
-            del response['Vary']
-        except KeyError:
-            pass
-
-    return response
-
diff --git a/lib/django-1.4/django/middleware/cache.py b/lib/django-1.4/django/middleware/cache.py
index 34bf0ca..760ba4e 100644
--- a/lib/django-1.4/django/middleware/cache.py
+++ b/lib/django-1.4/django/middleware/cache.py
@@ -50,7 +50,8 @@
 
 from django.conf import settings
 from django.core.cache import get_cache, DEFAULT_CACHE_ALIAS
-from django.utils.cache import get_cache_key, learn_cache_key, patch_response_headers, get_max_age
+from django.utils.cache import (get_cache_key, get_max_age, has_vary_header,
+    learn_cache_key, patch_response_headers)
 
 
 class UpdateCacheMiddleware(object):
@@ -93,8 +94,15 @@
         if not self._should_update_cache(request, response):
             # We don't need to update the cache, just return.
             return response
+
         if not response.status_code == 200:
             return response
+
+        # Don't cache responses that set a user-specific (and maybe security
+        # sensitive) cookie in response to a cookie-less request.
+        if not request.COOKIES and response.cookies and has_vary_header(response, 'Cookie'):
+            return response
+
         # Try to get the timeout from the "max-age" section of the "Cache-
         # Control" header before reverting to using the default cache_timeout
         # length.
diff --git a/lib/django-1.4/django/middleware/transaction.py b/lib/django-1.4/django/middleware/transaction.py
index 96b1538..4440f37 100644
--- a/lib/django-1.4/django/middleware/transaction.py
+++ b/lib/django-1.4/django/middleware/transaction.py
@@ -15,6 +15,10 @@
     def process_exception(self, request, exception):
         """Rolls back the database and leaves transaction management"""
         if transaction.is_dirty():
+            # This rollback might fail because of network failure for example.
+            # If rollback isn't possible it is impossible to clean the
+            # connection's state. So leave the connection in dirty state and
+            # let request_finished signal deal with cleaning the connection.
             transaction.rollback()
         transaction.leave_transaction_management()
 
@@ -22,6 +26,21 @@
         """Commits and leaves transaction management."""
         if transaction.is_managed():
             if transaction.is_dirty():
-                transaction.commit()
+                # Note: it is possible that the commit fails. If the reason is
+                # closed connection or some similar reason, then there is
+                # little hope to proceed nicely. However, in some cases (
+                # deferred foreign key checks for exampl) it is still possible
+                # to rollback().
+                try:
+                    transaction.commit()
+                except Exception:
+                    # If the rollback fails, the transaction state will be
+                    # messed up. It doesn't matter, the connection will be set
+                    # to clean state after the request finishes. And, we can't
+                    # clean the state here properly even if we wanted to, the
+                    # connection is in transaction but we can't rollback...
+                    transaction.rollback()
+                    transaction.leave_transaction_management()
+                    raise
             transaction.leave_transaction_management()
         return response
diff --git a/lib/django-1.4/django/template/defaulttags.py b/lib/django-1.4/django/template/defaulttags.py
index 954c5d6..f977901 100644
--- a/lib/django-1.4/django/template/defaulttags.py
+++ b/lib/django-1.4/django/template/defaulttags.py
@@ -1,5 +1,6 @@
 """Default tags used by the template system, available to all templates."""
 
+import os
 import sys
 import re
 from datetime import datetime
@@ -309,6 +310,7 @@
         return ''
 
 def include_is_allowed(filepath):
+    filepath = os.path.abspath(filepath)
     for root in settings.ALLOWED_INCLUDE_ROOTS:
         if filepath.startswith(root):
             return True
diff --git a/lib/django-1.4/django/test/testcases.py b/lib/django-1.4/django/test/testcases.py
index 1f45187..8e79489 100644
--- a/lib/django-1.4/django/test/testcases.py
+++ b/lib/django-1.4/django/test/testcases.py
@@ -63,6 +63,7 @@
 real_enter_transaction_management = transaction.enter_transaction_management
 real_leave_transaction_management = transaction.leave_transaction_management
 real_managed = transaction.managed
+real_abort = transaction.abort
 
 def nop(*args, **kwargs):
     return
@@ -73,6 +74,7 @@
     transaction.enter_transaction_management = nop
     transaction.leave_transaction_management = nop
     transaction.managed = nop
+    transaction.abort = nop
 
 def restore_transaction_methods():
     transaction.commit = real_commit
@@ -80,6 +82,7 @@
     transaction.enter_transaction_management = real_enter_transaction_management
     transaction.leave_transaction_management = real_leave_transaction_management
     transaction.managed = real_managed
+    transaction.abort = real_abort
 
 
 def assert_and_parse_html(self, html, user_msg, msg):
diff --git a/lib/django-1.4/django/test/utils.py b/lib/django-1.4/django/test/utils.py
index ed5ab59..0988d89 100644
--- a/lib/django-1.4/django/test/utils.py
+++ b/lib/django-1.4/django/test/utils.py
@@ -3,6 +3,7 @@
 import warnings
 from django.conf import settings, UserSettingsHolder
 from django.core import mail
+from django import http
 from django.test.signals import template_rendered, setting_changed
 from django.template import Template, loader, TemplateDoesNotExist
 from django.template.loaders import cached
@@ -69,12 +70,18 @@
         - Set the email backend to the locmem email backend.
         - Setting the active locale to match the LANGUAGE_CODE setting.
     """
-    Template.original_render = Template._render
+    Template._original_render = Template._render
     Template._render = instrumented_test_render
 
-    mail.original_email_backend = settings.EMAIL_BACKEND
+    # Storing previous values in the settings module itself is problematic.
+    # Store them in arbitrary (but related) modules instead. See #20636.
+
+    mail._original_email_backend = settings.EMAIL_BACKEND
     settings.EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
 
+    http._original_allowed_hosts = settings.ALLOWED_HOSTS
+    settings.ALLOWED_HOSTS = ['*']
+
     mail.outbox = []
 
     deactivate()
@@ -87,11 +94,14 @@
         - Restoring the email sending functions
 
     """
-    Template._render = Template.original_render
-    del Template.original_render
+    Template._render = Template._original_render
+    del Template._original_render
 
-    settings.EMAIL_BACKEND = mail.original_email_backend
-    del mail.original_email_backend
+    settings.EMAIL_BACKEND = mail._original_email_backend
+    del mail._original_email_backend
+
+    settings.ALLOWED_HOSTS = http._original_allowed_hosts
+    del http._original_allowed_hosts
 
     del mail.outbox
 
diff --git a/lib/django-1.4/django/utils/crypto.py b/lib/django-1.4/django/utils/crypto.py
index 44b7faf..fb3faeb 100644
--- a/lib/django-1.4/django/utils/crypto.py
+++ b/lib/django-1.4/django/utils/crypto.py
@@ -106,21 +106,6 @@
     return binascii.unhexlify(hex_format_string % x)
 
 
-def _fast_hmac(key, msg, digest):
-    """
-    A trimmed down version of Python's HMAC implementation
-    """
-    dig1, dig2 = digest(), digest()
-    if len(key) > dig1.block_size:
-        key = digest(key).digest()
-    key += chr(0) * (dig1.block_size - len(key))
-    dig1.update(key.translate(_trans_36))
-    dig1.update(msg)
-    dig2.update(key.translate(_trans_5c))
-    dig2.update(dig1.digest())
-    return dig2
-
-
 def pbkdf2(password, salt, iterations, dklen=0, digest=None):
     """
     Implements PBKDF2 as defined in RFC 2898, section 5.2
@@ -146,11 +131,21 @@
 
     hex_format_string = "%%0%ix" % (hlen * 2)
 
+    inner, outer = digest(), digest()
+    if len(password) > inner.block_size:
+        password = digest(password).digest()
+    password += '\x00' * (inner.block_size - len(password))
+    inner.update(password.translate(_trans_36))
+    outer.update(password.translate(_trans_5c))
+
     def F(i):
         def U():
             u = salt + struct.pack('>I', i)
             for j in xrange(int(iterations)):
-                u = _fast_hmac(password, u, digest).digest()
+                dig1, dig2 = inner.copy(), outer.copy()
+                dig1.update(u)
+                dig2.update(dig1.digest())
+                u = dig2.digest()
                 yield _bin_to_long(u)
         return _long_to_bin(reduce(operator.xor, U()), hex_format_string)
 
diff --git a/lib/django-1.4/django/utils/datetime_safe.py b/lib/django-1.4/django/utils/datetime_safe.py
index b634888..ca96fb3 100644
--- a/lib/django-1.4/django/utils/datetime_safe.py
+++ b/lib/django-1.4/django/utils/datetime_safe.py
@@ -19,8 +19,11 @@
     def strftime(self, fmt):
         return strftime(self, fmt)
 
-    def combine(self, date, time):
-        return datetime(date.year, date.month, date.day, time.hour, time.minute, time.microsecond, time.tzinfo)
+    @classmethod
+    def combine(cls, date, time):
+        return cls(date.year, date.month, date.day,
+                   time.hour, time.minute, time.second,
+                   time.microsecond, time.tzinfo)
 
     def date(self):
         return date(self.year, self.month, self.day)
diff --git a/lib/django-1.4/django/utils/http.py b/lib/django-1.4/django/utils/http.py
index d2e4eb5..2d40489 100644
--- a/lib/django-1.4/django/utils/http.py
+++ b/lib/django-1.4/django/utils/http.py
@@ -228,11 +228,24 @@
 def is_safe_url(url, host=None):
     """
     Return ``True`` if the url is a safe redirection (i.e. it doesn't point to
-    a different host).
+    a different host and uses a safe scheme).
 
     Always returns ``False`` on an empty url.
     """
     if not url:
         return False
-    netloc = urlparse.urlparse(url)[1]
-    return not netloc or netloc == host
+    # Chrome treats \ completely as /
+    url = url.replace('\\', '/')
+    # Chrome considers any URL with more than two slashes to be absolute, but
+    # urlparse is not so flexible. Treat any url with three slashes as unsafe.
+    if url.startswith('///'):
+        return False
+    url_info = urlparse.urlparse(url)
+    # Forbid URLs like http:///example.com - with a scheme, but without a hostname.
+    # In that URL, example.com is not the hostname but a path component. However,
+    # Chrome will still consider example.com to be the hostname, so we must not
+    # allow this syntax.
+    if not url_info[1] and url_info[0]:
+        return False
+    return (not url_info[1] or url_info[1] == host) and \
+        (not url_info[0] or url_info[0] in ['http', 'https'])
diff --git a/lib/django-1.4/django/utils/six.py b/lib/django-1.4/django/utils/six.py
index e4ce939..26370d7 100644
--- a/lib/django-1.4/django/utils/six.py
+++ b/lib/django-1.4/django/utils/six.py
@@ -1,14 +1,35 @@
 """Utilities for writing code that runs on Python 2 and 3"""
 
+# Copyright (c) 2010-2014 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
 import operator
 import sys
 import types
 
 __author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.2.0"
+__version__ = "1.6.1"
 
 
-# True if we are running on Python 3.
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
 PY3 = sys.version_info[0] == 3
 
 if PY3:
@@ -26,7 +47,7 @@
     text_type = unicode
     binary_type = str
 
-    if sys.platform == "java":
+    if sys.platform.startswith("java"):
         # Jython always uses 32 bits.
         MAXSIZE = int((1 << 31) - 1)
     else:
@@ -42,7 +63,7 @@
         else:
             # 64-bit
             MAXSIZE = int((1 << 63) - 1)
-            del X
+        del X
 
 
 def _add_doc(func, doc):
@@ -62,10 +83,14 @@
         self.name = name
 
     def __get__(self, obj, tp):
-        result = self._resolve()
-        setattr(obj, self.name, result)
+        try:
+            result = self._resolve()
+        except ImportError:
+            # See the nice big comment in MovedModule.__getattr__.
+            raise AttributeError("%s could not be imported " % self.name)
+        setattr(obj, self.name, result) # Invokes __set__.
         # This is a bit ugly, but it avoids running this again.
-        delattr(tp, self.name)
+        delattr(obj.__class__, self.name)
         return result
 
 
@@ -83,6 +108,42 @@
     def _resolve(self):
         return _import_module(self.mod)
 
+    def __getattr__(self, attr):
+        # It turns out many Python frameworks like to traverse sys.modules and
+        # try to load various attributes. This causes problems if this is a
+        # platform-specific module on the wrong platform, like _winreg on
+        # Unixes. Therefore, we silently pretend unimportable modules do not
+        # have any attributes. See issues #51, #53, #56, and #63 for the full
+        # tales of woe.
+        #
+        # First, if possible, avoid loading the module just to look at __file__,
+        # __name__, or __path__.
+        if (attr in ("__file__", "__name__", "__path__") and
+            self.mod not in sys.modules):
+            raise AttributeError(attr)
+        try:
+            _module = self._resolve()
+        except ImportError:
+            raise AttributeError(attr)
+        value = getattr(_module, attr)
+        setattr(self, attr, value)
+        return value
+
+
+class _LazyModule(types.ModuleType):
+
+    def __init__(self, name):
+        super(_LazyModule, self).__init__(name)
+        self.__doc__ = self.__class__.__doc__
+
+    def __dir__(self):
+        attrs = ["__doc__", "__name__"]
+        attrs += [attr.name for attr in self._moved_attributes]
+        return attrs
+
+    # Subclasses should override this
+    _moved_attributes = []
+
 
 class MovedAttribute(_LazyDescr):
 
@@ -110,29 +171,37 @@
 
 
 
-class _MovedItems(types.ModuleType):
+class _MovedItems(_LazyModule):
     """Lazy loading of moved objects"""
 
 
 _moved_attributes = [
     MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
     MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
     MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
     MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
     MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
     MovedAttribute("reduce", "__builtin__", "functools"),
     MovedAttribute("StringIO", "StringIO", "io"),
+    MovedAttribute("UserString", "UserString", "collections"),
     MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
     MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
 
     MovedModule("builtins", "__builtin__"),
     MovedModule("configparser", "ConfigParser"),
     MovedModule("copyreg", "copy_reg"),
+    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
     MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
     MovedModule("http_cookies", "Cookie", "http.cookies"),
     MovedModule("html_entities", "htmlentitydefs", "html.entities"),
     MovedModule("html_parser", "HTMLParser", "html.parser"),
     MovedModule("http_client", "httplib", "http.client"),
+    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
     MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
     MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
     MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
@@ -140,12 +209,14 @@
     MovedModule("queue", "Queue"),
     MovedModule("reprlib", "repr"),
     MovedModule("socketserver", "SocketServer"),
+    MovedModule("_thread", "thread", "_thread"),
     MovedModule("tkinter", "Tkinter"),
     MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
     MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
     MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
     MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
     MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
     MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
     MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
     MovedModule("tkinter_colorchooser", "tkColorChooser",
@@ -157,14 +228,170 @@
     MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
     MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                 "tkinter.simpledialog"),
+    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
     MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+    MovedModule("xmlrpc_server", "xmlrpclib", "xmlrpc.server"),
     MovedModule("winreg", "_winreg"),
 ]
 for attr in _moved_attributes:
     setattr(_MovedItems, attr.name, attr)
+    if isinstance(attr, MovedModule):
+        sys.modules[__name__ + ".moves." + attr.name] = attr
 del attr
 
-moves = sys.modules["django.utils.six.moves"] = _MovedItems("moves")
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+    MovedAttribute("quote", "urllib", "urllib.parse"),
+    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+    MovedAttribute("unquote", "urllib", "urllib.parse"),
+    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+    MovedAttribute("urlencode", "urllib", "urllib.parse"),
+    MovedAttribute("splitquery", "urllib", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+    setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_parse"] = sys.modules[__name__ + ".moves.urllib.parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+    MovedAttribute("URLError", "urllib2", "urllib.error"),
+    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+    setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_error"] = sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+    MovedAttribute("urlopen", "urllib2", "urllib.request"),
+    MovedAttribute("install_opener", "urllib2", "urllib.request"),
+    MovedAttribute("build_opener", "urllib2", "urllib.request"),
+    MovedAttribute("pathname2url", "urllib", "urllib.request"),
+    MovedAttribute("url2pathname", "urllib", "urllib.request"),
+    MovedAttribute("getproxies", "urllib", "urllib.request"),
+    MovedAttribute("Request", "urllib2", "urllib.request"),
+    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+    MovedAttribute("URLopener", "urllib", "urllib.request"),
+    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+    setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_request"] = sys.modules[__name__ + ".moves.urllib.request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+    MovedAttribute("addbase", "urllib", "urllib.response"),
+    MovedAttribute("addclosehook", "urllib", "urllib.response"),
+    MovedAttribute("addinfo", "urllib", "urllib.response"),
+    MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+    setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_response"] = sys.modules[__name__ + ".moves.urllib.response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_robotparser"] = sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+    parse = sys.modules[__name__ + ".moves.urllib_parse"]
+    error = sys.modules[__name__ + ".moves.urllib_error"]
+    request = sys.modules[__name__ + ".moves.urllib_request"]
+    response = sys.modules[__name__ + ".moves.urllib_response"]
+    robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"]
+
+    def __dir__(self):
+        return ['parse', 'error', 'request', 'response', 'robotparser']
+
+
+sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(__name__ + ".moves.urllib")
 
 
 def add_move(move):
@@ -187,22 +414,28 @@
     _meth_func = "__func__"
     _meth_self = "__self__"
 
+    _func_closure = "__closure__"
     _func_code = "__code__"
     _func_defaults = "__defaults__"
+    _func_globals = "__globals__"
 
     _iterkeys = "keys"
     _itervalues = "values"
     _iteritems = "items"
+    _iterlists = "lists"
 else:
     _meth_func = "im_func"
     _meth_self = "im_self"
 
+    _func_closure = "func_closure"
     _func_code = "func_code"
     _func_defaults = "func_defaults"
+    _func_globals = "func_globals"
 
     _iterkeys = "iterkeys"
     _itervalues = "itervalues"
     _iteritems = "iteritems"
+    _iterlists = "iterlists"
 
 
 try:
@@ -213,18 +446,27 @@
 next = advance_iterator
 
 
+try:
+    callable = callable
+except NameError:
+    def callable(obj):
+        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
 if PY3:
     def get_unbound_function(unbound):
         return unbound
 
-    Iterator = object
+    create_bound_method = types.MethodType
 
-    def callable(obj):
-        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+    Iterator = object
 else:
     def get_unbound_function(unbound):
         return unbound.im_func
 
+    def create_bound_method(func, obj):
+        return types.MethodType(func, obj, obj.__class__)
+
     class Iterator(object):
 
         def next(self):
@@ -237,21 +479,27 @@
 
 get_method_function = operator.attrgetter(_meth_func)
 get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
 get_function_code = operator.attrgetter(_func_code)
 get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
 
 
-def iterkeys(d):
+def iterkeys(d, **kw):
     """Return an iterator over the keys of a dictionary."""
-    return iter(getattr(d, _iterkeys)())
+    return iter(getattr(d, _iterkeys)(**kw))
 
-def itervalues(d):
+def itervalues(d, **kw):
     """Return an iterator over the values of a dictionary."""
-    return iter(getattr(d, _itervalues)())
+    return iter(getattr(d, _itervalues)(**kw))
 
-def iteritems(d):
+def iteritems(d, **kw):
     """Return an iterator over the (key, value) pairs of a dictionary."""
-    return iter(getattr(d, _iteritems)())
+    return iter(getattr(d, _iteritems)(**kw))
+
+def iterlists(d, **kw):
+    """Return an iterator over the (key, [values]) pairs of a dictionary."""
+    return iter(getattr(d, _iterlists)(**kw))
 
 
 if PY3:
@@ -259,21 +507,33 @@
         return s.encode("latin-1")
     def u(s):
         return s
+    unichr = chr
     if sys.version_info[1] <= 1:
         def int2byte(i):
             return bytes((i,))
     else:
         # This is about 2x faster than the implementation above on 3.2+
         int2byte = operator.methodcaller("to_bytes", 1, "big")
+    byte2int = operator.itemgetter(0)
+    indexbytes = operator.getitem
+    iterbytes = iter
     import io
     StringIO = io.StringIO
     BytesIO = io.BytesIO
 else:
     def b(s):
         return s
+    # Workaround for standalone backslash
     def u(s):
-        return unicode(s, "unicode_escape")
+        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+    unichr = unichr
     int2byte = chr
+    def byte2int(bs):
+        return ord(bs[0])
+    def indexbytes(buf, i):
+        return ord(buf[i])
+    def iterbytes(buf):
+        return (ord(byte) for byte in buf)
     import StringIO
     StringIO = BytesIO = StringIO.StringIO
 _add_doc(b, """Byte literal""")
@@ -281,8 +541,7 @@
 
 
 if PY3:
-    import builtins
-    exec_ = getattr(builtins, "exec")
+    exec_ = getattr(moves.builtins, "exec")
 
 
     def reraise(tp, value, tb=None):
@@ -290,22 +549,18 @@
             raise value.with_traceback(tb)
         raise value
 
-
-    print_ = getattr(builtins, "print")
-    del builtins
-
 else:
-    def exec_(code, globs=None, locs=None):
+    def exec_(_code_, _globs_=None, _locs_=None):
         """Execute code in a namespace."""
-        if globs is None:
+        if _globs_ is None:
             frame = sys._getframe(1)
-            globs = frame.f_globals
-            if locs is None:
-                locs = frame.f_locals
+            _globs_ = frame.f_globals
+            if _locs_ is None:
+                _locs_ = frame.f_locals
             del frame
-        elif locs is None:
-            locs = globs
-        exec("""exec code in globs, locs""")
+        elif _locs_ is None:
+            _locs_ = _globs_
+        exec("""exec _code_ in _globs_, _locs_""")
 
 
     exec_("""def reraise(tp, value, tb=None):
@@ -313,14 +568,24 @@
 """)
 
 
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
     def print_(*args, **kwargs):
-        """The new-style print function."""
+        """The new-style print function for Python 2.4 and 2.5."""
         fp = kwargs.pop("file", sys.stdout)
         if fp is None:
             return
         def write(data):
             if not isinstance(data, basestring):
                 data = str(data)
+            # If the file has an encoding, encode unicode with it.
+            if (isinstance(fp, file) and
+                isinstance(data, unicode) and
+                fp.encoding is not None):
+                errors = getattr(fp, "errors", None)
+                if errors is None:
+                    errors = "strict"
+                data = data.encode(fp.encoding, errors)
             fp.write(data)
         want_unicode = False
         sep = kwargs.pop("sep", None)
@@ -361,21 +626,50 @@
 _add_doc(reraise, """Reraise an exception.""")
 
 
-def with_metaclass(meta, base=object):
+def with_metaclass(meta, *bases):
     """Create a base class with a metaclass."""
-    return meta("NewBase", (base,), {})
+    return meta("NewBase", bases, {})
+
+def add_metaclass(metaclass):
+    """Class decorator for creating a class with a metaclass."""
+    def wrapper(cls):
+        orig_vars = cls.__dict__.copy()
+        orig_vars.pop('__dict__', None)
+        orig_vars.pop('__weakref__', None)
+        slots = orig_vars.get('__slots__')
+        if slots is not None:
+            if isinstance(slots, str):
+                slots = [slots]
+            for slots_var in slots:
+                orig_vars.pop(slots_var)
+        return metaclass(cls.__name__, cls.__bases__, orig_vars)
+    return wrapper
 
 
 ### Additional customizations for Django ###
 
 if PY3:
-    _iterlists = "lists"
+    _assertRaisesRegex = "assertRaisesRegex"
+    _assertRegex = "assertRegex"
+    memoryview = memoryview
 else:
-    _iterlists = "iterlists"
+    _assertRaisesRegex = "assertRaisesRegexp"
+    _assertRegex = "assertRegexpMatches"
+    # memoryview and buffer are not strictly equivalent, but should be fine for
+    # django core usage (mainly BinaryField). However, Jython doesn't support
+    # buffer (see http://bugs.jython.org/issue1521), so we have to be careful.
+    if sys.platform.startswith('java'):
+        memoryview = memoryview
+    else:
+        memoryview = buffer
 
-def iterlists(d):
-    """Return an iterator over the values of a MultiValueDict."""
-    return getattr(d, _iterlists)()
+
+def assertRaisesRegex(self, *args, **kwargs):
+    return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+    return getattr(self, _assertRegex)(*args, **kwargs)
 
 
 add_move(MovedModule("_dummy_thread", "dummy_thread"))
diff --git a/lib/django-1.4/django/views/decorators/debug.py b/lib/django-1.4/django/views/decorators/debug.py
index 5c22296..381e9dd 100644
--- a/lib/django-1.4/django/views/decorators/debug.py
+++ b/lib/django-1.4/django/views/decorators/debug.py
@@ -1,5 +1,7 @@
 import functools
 
+from django.http import HttpRequest
+
 
 def sensitive_variables(*variables):
     """
@@ -62,6 +64,10 @@
     def decorator(view):
         @functools.wraps(view)
         def sensitive_post_parameters_wrapper(request, *args, **kwargs):
+            assert isinstance(request, HttpRequest), (
+              "sensitive_post_parameters didn't receive an HttpRequest. If you "
+              "are decorating a classmethod, be sure to use @method_decorator."
+            )
             if parameters:
                 request.sensitive_post_parameters = parameters
             else:
diff --git a/lib/django-1.4/docs/_ext/djangodocs.py b/lib/django-1.4/docs/_ext/djangodocs.py
index 3cf00a3..3398017 100644
--- a/lib/django-1.4/docs/_ext/djangodocs.py
+++ b/lib/django-1.4/docs/_ext/djangodocs.py
@@ -105,14 +105,22 @@
 
     # Don't use border=1, which docutils does by default.
     def visit_table(self, node):
+        self.context.append(self.compact_p)
+        self.compact_p = True
         self._table_row_index = 0 # Needed by Sphinx
         self.body.append(self.starttag(node, 'table', CLASS='docutils'))
 
-    # <big>? Really?
+    def depart_table(self, node):
+        self.compact_p = self.context.pop()
+        self.body.append('</table>\n')
+
     def visit_desc_parameterlist(self, node):
-        self.body.append('(')
+        self.body.append('(')  # by default sphinx puts <big> around the "("
         self.first_param = 1
+        self.optional_param_level = 0
         self.param_separator = node.child_text_separator
+        self.required_params_left = sum([isinstance(c, addnodes.desc_parameter)
+                                         for c in node.children])
 
     def depart_desc_parameterlist(self, node):
         self.body.append(')')
diff --git a/lib/django-1.4/docs/_theme/djangodocs/layout.html b/lib/django-1.4/docs/_theme/djangodocs/layout.html
index ef91dd7..caf990c 100644
--- a/lib/django-1.4/docs/_theme/djangodocs/layout.html
+++ b/lib/django-1.4/docs/_theme/djangodocs/layout.html
@@ -17,6 +17,9 @@
 {%- endmacro %}
 
 {% block extrahead %}
+{# When building htmlhelp (CHM format)  disable JQuery inclusion,  #}
+{# as it causes problems in compiled CHM files.                    #}
+{% if builder != "htmlhelp" %}
 {{ super() }}
 <script type="text/javascript" src="{{ pathto('templatebuiltins.js', 1) }}"></script>
 <script type="text/javascript">
@@ -51,6 +54,7 @@
     });
 })(jQuery);
 </script>
+{% endif %}
 {% endblock %}
 
 {% block document %}
diff --git a/lib/django-1.4/docs/_theme/djangodocs/static/djangodocs.css b/lib/django-1.4/docs/_theme/djangodocs/static/djangodocs.css
index 4efb7e0..bab81cd 100644
--- a/lib/django-1.4/docs/_theme/djangodocs/static/djangodocs.css
+++ b/lib/django-1.4/docs/_theme/djangodocs/static/djangodocs.css
@@ -115,7 +115,7 @@
 
 /*** versoinadded/changes ***/
 div.versionadded, div.versionchanged {  }
-div.versionadded span.title, div.versionchanged span.title { font-weight: bold; }
+div.versionadded span.title, div.versionchanged span.title, div.deprecated span.title { font-weight: bold; }
 
 /*** p-links ***/
 a.headerlink { color: #c60f0f; font-size: 0.8em; padding: 0 4px 0 4px; text-decoration: none; visibility: hidden; }
diff --git a/lib/django-1.4/docs/conf.py b/lib/django-1.4/docs/conf.py
index 45e2b50..e933373 100644
--- a/lib/django-1.4/docs/conf.py
+++ b/lib/django-1.4/docs/conf.py
@@ -50,9 +50,9 @@
 # built documents.
 #
 # The short X.Y version.
-version = '1.4.3'
+version = '1.4.13'
 # The full version, including alpha/beta/rc tags.
-release = '1.4.3'
+release = '1.4.13'
 # The next version to be released
 django_next_version = '1.5'
 
diff --git a/lib/django-1.4/docs/howto/custom-model-fields.txt b/lib/django-1.4/docs/howto/custom-model-fields.txt
index daaede8..fcbda03 100644
--- a/lib/django-1.4/docs/howto/custom-model-fields.txt
+++ b/lib/django-1.4/docs/howto/custom-model-fields.txt
@@ -482,6 +482,16 @@
             return ''.join([''.join(l) for l in (value.north,
                     value.east, value.south, value.west)])
 
+.. warning::
+
+    If your custom field uses the ``CHAR``, ``VARCHAR`` or ``TEXT``
+    types for MySQL, you must make sure that :meth:`.get_prep_value`
+    always returns a string type. MySQL performs flexible and unexpected
+    matching when a query is performed on these types and the provided
+    value is an integer, which can cause queries to include unexpected
+    objects in their results. This problem cannot occur if you always
+    return a string type from :meth:`.get_prep_value`.
+
 Converting query values to database values
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
diff --git a/lib/django-1.4/docs/howto/deployment/wsgi/modwsgi.txt b/lib/django-1.4/docs/howto/deployment/wsgi/modwsgi.txt
index d669f6d..911a2d2 100644
--- a/lib/django-1.4/docs/howto/deployment/wsgi/modwsgi.txt
+++ b/lib/django-1.4/docs/howto/deployment/wsgi/modwsgi.txt
@@ -18,8 +18,8 @@
 the details about how to use mod_wsgi. You'll probably want to start with the
 `installation and configuration documentation`_.
 
-.. _official mod_wsgi documentation: http://www.modwsgi.org/
-.. _installation and configuration documentation: http://www.modwsgi.org/wiki/InstallationInstructions
+.. _official mod_wsgi documentation: http://code.google.com/p/modwsgi/
+.. _installation and configuration documentation: http://code.google.com/p/modwsgi/wiki/InstallationInstructions
 
 Basic configuration
 ===================
@@ -61,10 +61,10 @@
 
 If you install your project's Python dependencies inside a `virtualenv`_,
 you'll need to add the path to this virtualenv's ``site-packages`` directory to
-your Python path as well. To do this, you can add another line to your
-Apache configuration::
+your Python path as well. To do this, add an additional path to your
+`WSGIPythonPath` directive, with multiple paths separated by a colon::
 
-    WSGIPythonPath /path/to/your/venv/lib/python2.X/site-packages
+    WSGIPythonPath /path/to/mysite.com:/path/to/your/venv/lib/python2.X/site-packages
 
 Make sure you give the correct path to your virtualenv, and replace
 ``python2.X`` with the correct Python version (e.g. ``python2.7``).
diff --git a/lib/django-1.4/docs/howto/deployment/wsgi/uwsgi.txt b/lib/django-1.4/docs/howto/deployment/wsgi/uwsgi.txt
index 3ac2203..e553a7d 100644
--- a/lib/django-1.4/docs/howto/deployment/wsgi/uwsgi.txt
+++ b/lib/django-1.4/docs/howto/deployment/wsgi/uwsgi.txt
@@ -93,6 +93,6 @@
     uwsgi --ini uwsgi.ini
 
 See the uWSGI docs on `managing the uWSGI process`_ for information on
-starting, stoping and reloading the uWSGI workers.
+starting, stopping and reloading the uWSGI workers.
 
 .. _managing the uWSGI process: http://projects.unbit.it/uwsgi/wiki/Management
diff --git a/lib/django-1.4/docs/howto/error-reporting.txt b/lib/django-1.4/docs/howto/error-reporting.txt
index 64af2a0..5314821 100644
--- a/lib/django-1.4/docs/howto/error-reporting.txt
+++ b/lib/django-1.4/docs/howto/error-reporting.txt
@@ -123,6 +123,8 @@
 Filtering sensitive information
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
+.. currentmodule:: django.views.decorators.debug
+
 Error reports are really helpful for debugging errors, so it is generally
 useful to record as much relevant information about those errors as possible.
 For example, by default Django records the `full traceback`_ for the
@@ -236,11 +238,13 @@
             request.exception_reporter_filter = CustomExceptionReporterFilter()
         ...
 
+.. currentmodule:: django.views.debug
+
 Your custom filter class needs to inherit from
 :class:`django.views.debug.SafeExceptionReporterFilter` and may override the
 following methods:
 
-.. class:: django.views.debug.SafeExceptionReporterFilter
+.. class:: SafeExceptionReporterFilter
 
 .. method:: SafeExceptionReporterFilter.is_active(self, request)
 
diff --git a/lib/django-1.4/docs/howto/initial-data.txt b/lib/django-1.4/docs/howto/initial-data.txt
index 36306d4..295b74c 100644
--- a/lib/django-1.4/docs/howto/initial-data.txt
+++ b/lib/django-1.4/docs/howto/initial-data.txt
@@ -163,4 +163,4 @@
 data. For example, if your app contains the files ``sql/person.sql``
 and ``sql/person.sqlite3.sql`` and you're installing the app on
 SQLite, Django will execute the contents of
-``sql/person.sqlite.sql`` first, then ``sql/person.sql``.
+``sql/person.sqlite3.sql`` first, then ``sql/person.sql``.
diff --git a/lib/django-1.4/docs/howto/jython.txt b/lib/django-1.4/docs/howto/jython.txt
index f485d0b..28cd2c9 100644
--- a/lib/django-1.4/docs/howto/jython.txt
+++ b/lib/django-1.4/docs/howto/jython.txt
@@ -4,70 +4,17 @@
 
 .. index:: Jython, Java, JVM
 
-Jython_ is an implementation of Python that runs on the Java platform (JVM).
-Django runs cleanly on Jython version 2.5 or later, which means you can deploy
-Django on any Java platform.
+As of January 2014, the latest release of `django-jython`_ supports Django 1.3
+which is no longer supported (receiving fixes or security updates) by the
+Django Project. We therefore recommend that you do not try to run Django on
+Jython at this time.
 
-This document will get you up and running with Django on top of Jython.
-
-.. _jython: http://www.jython.org/
-
-Installing Jython
-=================
-
-Django works with Jython versions 2.5b3 and higher. Download Jython at 
-http://www.jython.org/.
-
-Creating a servlet container
-============================
-
-If you just want to experiment with Django, skip ahead to the next section;
-Django includes a lightweight Web server you can use for testing, so you won't
-need to set up anything else until you're ready to deploy Django in production.
-
-If you want to use Django on a production site, use a Java servlet container,
-such as `Apache Tomcat`_. Full JavaEE applications servers such as `GlassFish`_
-or `JBoss`_ are also OK, if you need the extra features they include.
-
-.. _`Apache Tomcat`: http://tomcat.apache.org/
-.. _GlassFish: http://glassfish.java.net/
-.. _JBoss: http://www.jboss.org/
-
-Installing Django
-=================
-
-The next step is to install Django itself. This is exactly the same as
-installing Django on standard Python, so see
-:ref:`removing-old-versions-of-django` and :ref:`install-django-code` for
-instructions.
-
-Installing Jython platform support libraries
-============================================
-
-The `django-jython`_ project contains database backends and management commands
-for Django/Jython development. Note that the builtin Django backends won't work
-on top of Jython.
+The django-jython project is `seeking contributors`_ to help update its code for
+newer versions of Django. You can select an older version of this documentation
+to see the instructions we had for using Django with Jython. If django-jython
+is updated, please `file a ticket`_ and we'll be happy to update our
+documentation accordingly.
 
 .. _`django-jython`: http://code.google.com/p/django-jython/
-
-To install it, follow the `installation instructions`_ detailed on the project
-Web site. Also, read the `database backends`_ documentation there.
-
-.. _`installation instructions`: http://code.google.com/p/django-jython/wiki/Install
-.. _`database backends`: http://code.google.com/p/django-jython/wiki/DatabaseBackends
-
-Differences with Django on Jython
-=================================
-
-.. index:: JYTHONPATH
-
-At this point, Django on Jython should behave nearly identically to Django
-running on standard Python. However, are a few differences to keep in mind:
-
-* Remember to use the ``jython`` command instead of ``python``. The
-  documentation uses ``python`` for consistency, but if you're using Jython
-  you'll want to mentally replace ``python`` with ``jython`` every time it
-  occurs.
-
-* Similarly, you'll need to use the ``JYTHONPATH`` environment variable
-  instead of ``PYTHONPATH``.
+.. _`seeking contributors`: https://groups.google.com/d/topic/django-jython-dev/oZpKucQpz7I/discussion
+.. _`file a ticket`: https://code.djangoproject.com/newticket
diff --git a/lib/django-1.4/docs/internals/contributing/writing-code/unit-tests.txt b/lib/django-1.4/docs/internals/contributing/writing-code/unit-tests.txt
index 3e791c0..70d321a 100644
--- a/lib/django-1.4/docs/internals/contributing/writing-code/unit-tests.txt
+++ b/lib/django-1.4/docs/internals/contributing/writing-code/unit-tests.txt
@@ -141,29 +141,50 @@
 If you want to run the full suite of tests, you'll need to install a number of
 dependencies:
 
+*  numpy_
+*  PIL_
+*  py-bcrypt_
 *  PyYAML_
 *  Markdown_
 *  Textile_
 *  Docutils_
+*  pytz_
 *  setuptools_
 *  memcached_, plus a :ref:`supported Python binding <memcached>`
 *  gettext_ (:ref:`gettext_on_windows`)
 *  selenium_ (if also using Python >= 2.6)
 
+You can find these dependencies in `pip requirements files`_ inside the
+``tests/requirements`` directory of the Django source tree and install them
+like so::
+
+    pip install -r tests/requirements/base.txt
+
+You can also install the database adapter(s) of your choice using
+``oracle.txt``, ``mysql.txt``, or ``postgres.txt``.
+
 If you want to test the memcached cache backend, you'll also need to define
 a :setting:`CACHES` setting that points at your memcached instance.
 
+To run the GeoDjango tests, you will need to :doc:`set up a spatial database
+and install the Geospatial libraries</ref/contrib/gis/install>`.
+
 Each of these dependencies is optional. If you're missing any of them, the
 associated tests will be skipped.
 
+.. _numpy: https://pypi.python.org/pypi/numpy
+.. _PIL: https://pypi.python.org/pypi/PIL
+.. _py-bcrypt: https://pypi.python.org/pypi/py-bcrypt/
 .. _PyYAML: http://pyyaml.org/wiki/PyYAML
 .. _Markdown: http://pypi.python.org/pypi/Markdown/1.7
 .. _Textile: http://pypi.python.org/pypi/textile
 .. _docutils: http://pypi.python.org/pypi/docutils/0.4
+.. _pytz: https://pypi.python.org/pypi/pytz/
 .. _setuptools: http://pypi.python.org/pypi/setuptools/
 .. _memcached: http://memcached.org/
 .. _gettext: http://www.gnu.org/software/gettext/manual/gettext.html
 .. _selenium: http://pypi.python.org/pypi/selenium
+.. _pip requirements files: http://www.pip-installer.org/en/latest/requirements.html
 
 Code coverage
 ~~~~~~~~~~~~~
diff --git a/lib/django-1.4/docs/internals/deprecation.txt b/lib/django-1.4/docs/internals/deprecation.txt
index 81ca7af..c45d3f1 100644
--- a/lib/django-1.4/docs/internals/deprecation.txt
+++ b/lib/django-1.4/docs/internals/deprecation.txt
@@ -7,20 +7,6 @@
 :ref:`deprecation policy <internal-release-deprecation-policy>`. More details
 about each item can often be found in the release notes of two versions prior.
 
-1.3
----
-
-See the :doc:`Django 1.1 release notes</releases/1.1>` for more details on
-these changes.
-
-* ``AdminSite.root()``.  This method of hooking up the admin URLs will be
-  removed in favor of including ``admin.site.urls``.
-
-* Authentication backends need to define the boolean attributes
-  ``supports_object_permissions`` and ``supports_anonymous_user`` until
-  version 1.4, at which point it will be assumed that all backends will
-  support these options.
-
 1.4
 ---
 
diff --git a/lib/django-1.4/docs/internals/release-process.txt b/lib/django-1.4/docs/internals/release-process.txt
index 8ead4d7..3b8e3c2 100644
--- a/lib/django-1.4/docs/internals/release-process.txt
+++ b/lib/django-1.4/docs/internals/release-process.txt
@@ -133,6 +133,20 @@
 * Documentation fixes will be applied to trunk, and, if easily backported, to
   the ``1.3.X`` branch.
 
+.. _lts-releases:
+
+Long-term support (LTS) releases
+================================
+
+Additionally, the Django team will occasionally designate certain releases
+to be "Long-term support" (LTS) releases. LTS releases will get security fixes
+applied for a guaranteed period of time, typically 3+ years, regardless of
+the pace of releases afterwards.
+
+The following releases have been designated for long-term support:
+
+    * Django 1.4, supported until at least March 2015.
+
 .. _release-process:
 
 Release process
diff --git a/lib/django-1.4/docs/intro/tutorial01.txt b/lib/django-1.4/docs/intro/tutorial01.txt
index d375640..c9b9af3 100644
--- a/lib/django-1.4/docs/intro/tutorial01.txt
+++ b/lib/django-1.4/docs/intro/tutorial01.txt
@@ -658,8 +658,10 @@
     >>> Poll.objects.filter(question__startswith='What')
     [<Poll: What's up?>]
 
-    # Get the poll whose year is 2012.
-    >>> Poll.objects.get(pub_date__year=2012)
+    # Get the poll that was published this year.
+    >>> from django.utils import timezone
+    >>> current_year = timezone.now().year
+    >>> Poll.objects.get(pub_date__year=current_year)
     <Poll: What's up?>
 
     >>> Poll.objects.get(id=2)
@@ -709,8 +711,9 @@
     # The API automatically follows relationships as far as you need.
     # Use double underscores to separate relationships.
     # This works as many levels deep as you want; there's no limit.
-    # Find all Choices for any poll whose pub_date is in 2012.
-    >>> Choice.objects.filter(poll__pub_date__year=2012)
+    # Find all Choices for any poll whose pub_date is in this year
+    # (reusing the 'current_year' variable we created above).
+    >>> Choice.objects.filter(poll__pub_date__year=current_year)
     [<Choice: Not much>, <Choice: The sky>, <Choice: Just hacking again>]
 
     # Let's delete one of the choices. Use delete() for that.
diff --git a/lib/django-1.4/docs/intro/tutorial03.txt b/lib/django-1.4/docs/intro/tutorial03.txt
index 5ed927a..4a0c900 100644
--- a/lib/django-1.4/docs/intro/tutorial03.txt
+++ b/lib/django-1.4/docs/intro/tutorial03.txt
@@ -516,7 +516,7 @@
 lines registering the admin site. Your ``polls/urls.py`` file should now look like
 this::
 
-    from django.conf.urls import patterns, include, url
+    from django.conf.urls import patterns, url
 
     urlpatterns = patterns('polls.views',
         url(r'^$', 'index'),
diff --git a/lib/django-1.4/docs/intro/tutorial04.txt b/lib/django-1.4/docs/intro/tutorial04.txt
index 85d54c3..d3af620 100644
--- a/lib/django-1.4/docs/intro/tutorial04.txt
+++ b/lib/django-1.4/docs/intro/tutorial04.txt
@@ -276,7 +276,7 @@
 
 By default, the :class:`~django.views.generic.list.DetailView` generic
 view uses a template called ``<app name>/<model name>_detail.html``.
-In our case, it'll use the template ``"polls/poll_detail.html"``. The
+In our case, it would use the template ``"polls/poll_detail.html"``. The
 ``template_name`` argument is used to tell Django to use a specific
 template name instead of the autogenerated default template name. We
 also specify the ``template_name`` for the ``results`` list view --
@@ -320,7 +320,7 @@
 Run the server, and use your new polling app based on generic views.
 
 For full details on generic views, see the :doc:`generic views documentation
-</topics/http/generic-views>`.
+</topics/generic-views>`.
 
 Coming soon
 ===========
diff --git a/lib/django-1.4/docs/misc/api-stability.txt b/lib/django-1.4/docs/misc/api-stability.txt
index 75fa6b4..e14e847 100644
--- a/lib/django-1.4/docs/misc/api-stability.txt
+++ b/lib/django-1.4/docs/misc/api-stability.txt
@@ -4,17 +4,19 @@
 
 :doc:`The release of Django 1.0 </releases/1.0>` comes with a promise of API
 stability and forwards-compatibility. In a nutshell, this means that code you
-develop against Django 1.0 will continue to work against 1.1 unchanged, and you
-should need to make only minor changes for any 1.X release.
+develop against a 1.X version of Django will continue to work with future
+1.X releases. You may need to make minor changes when upgrading the version of
+Django your project uses: see the "Backwards incompatible changes" section of
+the :doc:`release note </releases/index>` for the version or versions to which
+you are upgrading.
 
 What "stable" means
 ===================
 
 In this context, stable means:
 
-- All the public APIs -- everything documented in the linked documents below,
-  and all methods that don't begin with an underscore -- will not be moved or
-  renamed without providing backwards-compatible aliases.
+- All the public APIs (everything in this documentation) will not be moved
+  or renamed without providing backwards-compatible aliases.
 
 - If new features are added to these APIs -- which is quite possible --
   they will not break or change the meaning of existing methods. In other
@@ -35,77 +37,7 @@
 ===========
 
 In general, everything covered in the documentation -- with the exception of
-anything in the :doc:`internals area </internals/index>` is considered stable as
-of 1.0. This includes these APIs:
-
-- :doc:`Authorization </topics/auth>`
-
-- :doc:`Caching </topics/cache>`.
-
-- :doc:`Model definition, managers, querying and transactions
-  </topics/db/index>`
-
-- :doc:`Sending email </topics/email>`.
-
-- :doc:`File handling and storage </topics/files>`
-
-- :doc:`Forms </topics/forms/index>`
-
-- :doc:`HTTP request/response handling </topics/http/index>`, including file
-  uploads, middleware, sessions, URL resolution, view, and shortcut APIs.
-
-- :doc:`Generic views </topics/http/generic-views>`.
-
-- :doc:`Internationalization </topics/i18n/index>`.
-
-- :doc:`Pagination </topics/pagination>`
-
-- :doc:`Serialization </topics/serialization>`
-
-- :doc:`Signals </topics/signals>`
-
-- :doc:`Templates </topics/templates>`, including the language, Python-level
-  :doc:`template APIs </ref/templates/index>`, and :doc:`custom template tags
-  and libraries </howto/custom-template-tags>`. We may add new template
-  tags in the future and the names may inadvertently clash with
-  external template tags. Before adding any such tags, we'll ensure that
-  Django raises an error if it tries to load tags with duplicate names.
-
-- :doc:`Testing </topics/testing>`
-
-- :doc:`django-admin utility </ref/django-admin>`.
-
-- :doc:`Built-in middleware </ref/middleware>`
-
-- :doc:`Request/response objects </ref/request-response>`.
-
-- :doc:`Settings </ref/settings>`. Note, though that while the :doc:`list of
-  built-in settings </ref/settings>` can be considered complete we may -- and
-  probably will -- add new settings in future versions. This is one of those
-  places where "'stable' does not mean 'complete.'"
-
-- :doc:`Built-in signals </ref/signals>`. Like settings, we'll probably add
-  new signals in the future, but the existing ones won't break.
-
-- :doc:`Unicode handling </ref/unicode>`.
-
-- Everything covered by the :doc:`HOWTO guides </howto/index>`.
-
-``django.utils``
-----------------
-
-Most of the modules in ``django.utils`` are designed for internal use. Only
-the following parts of :doc:`django.utils </ref/utils>` can be considered stable:
-
-- ``django.utils.cache``
-- ``django.utils.datastructures.SortedDict`` -- only this single class; the
-  rest of the module is for internal use.
-- ``django.utils.encoding``
-- ``django.utils.feedgenerator``
-- ``django.utils.http``
-- ``django.utils.safestring``
-- ``django.utils.translation``
-- ``django.utils.tzinfo``
+anything in the :doc:`internals area </internals/index>` is considered stable.
 
 Exceptions
 ==========
@@ -118,24 +50,8 @@
 
 If we become aware of a security problem -- hopefully by someone following our
 :ref:`security reporting policy <reporting-security-issues>` -- we'll do
-everything necessary to fix it. This might mean breaking backwards compatibility; security trumps the compatibility guarantee.
-
-Contributed applications (``django.contrib``)
----------------------------------------------
-
-While we'll make every effort to keep these APIs stable -- and have no plans to
-break any contrib apps -- this is an area that will have more flux between
-releases. As the Web evolves, Django must evolve with it.
-
-However, any changes to contrib apps will come with an important guarantee:
-we'll make sure it's always possible to use an older version of a contrib app if
-we need to make changes. Thus, if Django 1.5 ships with a backwards-incompatible
-``django.contrib.flatpages``, we'll make sure you can still use the Django 1.4
-version alongside Django 1.5. This will continue to allow for easy upgrades.
-
-Historically, apps in ``django.contrib`` have been more stable than the core, so
-in practice we probably won't have to ever make this exception. However, it's
-worth noting if you're building apps that depend on ``django.contrib``.
+everything necessary to fix it. This might mean breaking backwards
+compatibility; security trumps the compatibility guarantee.
 
 APIs marked as internal
 -----------------------
diff --git a/lib/django-1.4/docs/ref/class-based-views.txt b/lib/django-1.4/docs/ref/class-based-views.txt
index 5223aee..5869a1e 100644
--- a/lib/django-1.4/docs/ref/class-based-views.txt
+++ b/lib/django-1.4/docs/ref/class-based-views.txt
@@ -862,6 +862,12 @@
         one user visiting your view could have an effect on subsequent users
         visiting the same view.
 
+    .. classmethod:: as_view(**initkwargs)
+
+        Returns a callable view that takes a request and returns a response::
+
+            response = MyView.as_view()(request)
+
     .. method:: dispatch(request, *args, **kwargs)
 
         The ``view`` part of the view -- the method that accepts a ``request``
diff --git a/lib/django-1.4/docs/ref/contrib/admin/index.txt b/lib/django-1.4/docs/ref/contrib/admin/index.txt
index c403541..96cc5b2 100644
--- a/lib/django-1.4/docs/ref/contrib/admin/index.txt
+++ b/lib/django-1.4/docs/ref/contrib/admin/index.txt
@@ -1870,6 +1870,10 @@
     Path to a custom template that will be used by the admin site main index
     view.
 
+.. attribute:: AdminSite.app_index_template
+
+    Path to a custom template that will be used by the admin site app index view.
+
 .. attribute:: AdminSite.login_template
 
     Path to a custom template that will be used by the admin site login view.
diff --git a/lib/django-1.4/docs/ref/contrib/csrf.txt b/lib/django-1.4/docs/ref/contrib/csrf.txt
index c841e2f..c6d4488 100644
--- a/lib/django-1.4/docs/ref/contrib/csrf.txt
+++ b/lib/django-1.4/docs/ref/contrib/csrf.txt
@@ -120,7 +120,7 @@
     var csrftoken = getCookie('csrftoken');
 
 The above code could be simplified by using the `jQuery cookie plugin
-<http://plugins.jquery.com/project/Cookie>`_ to replace ``getCookie``:
+<http://plugins.jquery.com/cookie/>`_ to replace ``getCookie``:
 
 .. code-block:: javascript
 
diff --git a/lib/django-1.4/docs/ref/contrib/formtools/form-wizard.txt b/lib/django-1.4/docs/ref/contrib/formtools/form-wizard.txt
index 5794393..1b006e9 100644
--- a/lib/django-1.4/docs/ref/contrib/formtools/form-wizard.txt
+++ b/lib/django-1.4/docs/ref/contrib/formtools/form-wizard.txt
@@ -394,8 +394,10 @@
 .. method:: WizardView.get_form(step=None, data=None, files=None)
 
     This method constructs the form for a given ``step``. If no ``step`` is
-    defined, the current step will be determined automatically.
-    The method gets three arguments:
+    defined, the current step will be determined automatically. If you override
+    ``get_form``, however, you will need to set ``step`` yourself using
+    ``self.steps.current`` as in the example below. The method gets three
+    arguments:
 
     * ``step`` -- The step for which the form instance should be generated.
     * ``data`` -- Gets passed to the form's data argument
@@ -407,6 +409,11 @@
 
         def get_form(self, step=None, data=None, files=None):
             form = super(MyWizard, self).get_form(step, data, files)
+
+            # determine the step if not given
+            if step is None:
+                step = self.steps.current
+
             if step == '1':
                 form.user = self.request.user
             return form
diff --git a/lib/django-1.4/docs/ref/contrib/gis/geoip.txt b/lib/django-1.4/docs/ref/contrib/gis/geoip.txt
index 8e88307..6438ba6 100644
--- a/lib/django-1.4/docs/ref/contrib/gis/geoip.txt
+++ b/lib/django-1.4/docs/ref/contrib/gis/geoip.txt
@@ -17,8 +17,7 @@
     in ``utils``, but will be removed in Django 1.6.
 
 The :class:`GeoIP` object is a ctypes wrapper for the
-`MaxMind GeoIP C API`__. [#]_  This interface is a BSD-licensed alternative
-to the GPL-licensed `Python GeoIP`__ interface provided by MaxMind.
+`MaxMind GeoIP C API`__. [#]_
 
 In order to perform IP-based geolocation, the :class:`GeoIP` object requires
 the GeoIP C libary and either the GeoIP `Country`__ or `City`__ 
@@ -29,7 +28,6 @@
 reference below for more details.
 
 __ http://www.maxmind.com/app/c
-__ http://www.maxmind.com/app/python
 __ http://www.maxmind.com/app/country
 __ http://www.maxmind.com/app/city
 __ http://www.maxmind.com/download/geoip/database/
diff --git a/lib/django-1.4/docs/ref/contrib/messages.txt b/lib/django-1.4/docs/ref/contrib/messages.txt
index 3227645..504691b 100644
--- a/lib/django-1.4/docs/ref/contrib/messages.txt
+++ b/lib/django-1.4/docs/ref/contrib/messages.txt
@@ -275,7 +275,7 @@
     messages.info(request, 'Hello world.', fail_silently=True)
 
 Internally, Django uses this functionality in the create, update, and delete
-:doc:`generic views </topics/http/generic-views>` so that they work even if the
+:doc:`generic views </topics/generic-views>` so that they work even if the
 message framework is disabled.
 
 .. note::
diff --git a/lib/django-1.4/docs/ref/contrib/sitemaps.txt b/lib/django-1.4/docs/ref/contrib/sitemaps.txt
index 3e29ec8..8fd07a6 100644
--- a/lib/django-1.4/docs/ref/contrib/sitemaps.txt
+++ b/lib/django-1.4/docs/ref/contrib/sitemaps.txt
@@ -318,7 +318,7 @@
     from django.views.decorators.cache import cache_page
 
     urlpatterns = patterns('',
-        url(r'^sitemap.xml$',
+        url(r'^sitemap\.xml$',
             cache_page(86400)(sitemaps_views.index),
             {'sitemaps': sitemaps, 'sitemap_url_name': 'sitemaps'}),
         url(r'^sitemap-(?P<section>.+)\.xml$',
diff --git a/lib/django-1.4/docs/ref/contrib/syndication.txt b/lib/django-1.4/docs/ref/contrib/syndication.txt
index 754ac58..7497bbe 100644
--- a/lib/django-1.4/docs/ref/contrib/syndication.txt
+++ b/lib/django-1.4/docs/ref/contrib/syndication.txt
@@ -17,7 +17,7 @@
 lower-level way.
 
 .. _RSS: http://www.whatisrss.com/
-.. _Atom: http://www.atomenabled.org/
+.. _Atom: http://tools.ietf.org/html/rfc4287
 
 The high-level framework
 ========================
diff --git a/lib/django-1.4/docs/ref/databases.txt b/lib/django-1.4/docs/ref/databases.txt
index dba278b..2691979 100644
--- a/lib/django-1.4/docs/ref/databases.txt
+++ b/lib/django-1.4/docs/ref/databases.txt
@@ -432,6 +432,22 @@
 statement. If ``select_for_update()`` is used with ``nowait=True`` then a
 ``DatabaseError`` will be raised.
 
+Automatic typecasting can cause unexpected results
+--------------------------------------------------
+
+When performing a query on a string type, but with an integer value, MySQL will
+coerce the types of all values in the table to an integer before performing the
+comparison. If your table contains the values ``'abc'``, ``'def'`` and you
+query for ``WHERE mycolumn=0``, both rows will match. Similarly, ``WHERE mycolumn=1``
+will match the value ``'abc1'``. Therefore, string type fields included in Django
+will always cast the value to a string before using it in a query.
+
+If you implement custom model fields that inherit from :class:`~django.db.models.Field`
+directly, are overriding :meth:`~django.db.models.Field.get_prep_value`, or use
+:meth:`extra() <django.db.models.query.QuerySet.extra>` or
+:meth:`raw() <django.db.models.Manager.raw>`, you should ensure that you
+perform the appropriate typecasting.
+
 .. _sqlite-notes:
 
 SQLite notes
@@ -705,6 +721,22 @@
 Oracle imposes a name length limit of 30 characters. To accommodate this, the
 backend truncates database identifiers to fit, replacing the final four
 characters of the truncated name with a repeatable MD5 hash value.
+Additionally, the backend turns database identifiers to all-uppercase.
+
+To prevent these transformations (this is usually required only when dealing
+with legacy databases or accessing tables which belong to other users), use
+a quoted name as the value for ``db_table``::
+
+    class LegacyModel(models.Model):
+        class Meta:
+       	    db_table = '"name_left_in_lowercase"'
+
+    class ForeignModel(models.Model):
+        class Meta:
+       	    db_table = '"OTHER_USER"."NAME_ONLY_SEEMS_OVER_30"'
+
+Quoted names can also be used with Django's other supported database 
+backends; except for Oracle, however, the quotes have no effect.
 
 When running syncdb, an ``ORA-06552`` error may be encountered if
 certain Oracle keywords are used as the name of a model field or the
diff --git a/lib/django-1.4/docs/ref/django-admin.txt b/lib/django-1.4/docs/ref/django-admin.txt
index 0c94611..f6df5d5 100644
--- a/lib/django-1.4/docs/ref/django-admin.txt
+++ b/lib/django-1.4/docs/ref/django-admin.txt
@@ -1035,7 +1035,8 @@
 with the ``--name`` option. The :class:`template context
 <django.template.Context>` used is:
 
-- Any option passed to the startproject command
+- Any option passed to the startapp command (among the command's supported
+  options)
 - ``project_name`` -- the project name as passed to the command
 - ``project_directory`` -- the full path of the newly created project
 - ``secret_key`` -- a random key for the :setting:`SECRET_KEY` setting
diff --git a/lib/django-1.4/docs/ref/forms/widgets.txt b/lib/django-1.4/docs/ref/forms/widgets.txt
index 60ba00c..a745f1b 100644
--- a/lib/django-1.4/docs/ref/forms/widgets.txt
+++ b/lib/django-1.4/docs/ref/forms/widgets.txt
@@ -278,15 +278,10 @@
         * A single value (e.g., a string) that is the "compressed" representation
           of a ``list`` of values.
 
-        If `value` is a list, output of :meth:`~MultiWidget.render` will be a
-        concatenation of rendered child widgets. If `value` is not a list, it
-        will be first processed by the method :meth:`~MultiWidget.decompress()`
-        to create the list and then processed as above.
-
-        In the second case -- i.e., if the value is *not* a list --
-        ``render()`` will first decompress the value into a ``list`` before
-        rendering it. It does so by calling the ``decompress()`` method, which
-        :class:`MultiWidget`'s subclasses must implement (see above).
+        If ``value`` is a list, the output of :meth:`~MultiWidget.render` will
+        be a concatenation of rendered child widgets. If ``value`` is not a
+        list, it will first be processed by the method
+        :meth:`~MultiWidget.decompress()` to create the list and then rendered.
 
         When ``render()`` executes its HTML rendering, each value in the list
         is rendered with the corresponding widget -- the first value is
diff --git a/lib/django-1.4/docs/ref/middleware.txt b/lib/django-1.4/docs/ref/middleware.txt
index cb8f737..a9c67db 100644
--- a/lib/django-1.4/docs/ref/middleware.txt
+++ b/lib/django-1.4/docs/ref/middleware.txt
@@ -37,7 +37,7 @@
 Adds a few conveniences for perfectionists:
 
 * Forbids access to user agents in the :setting:`DISALLOWED_USER_AGENTS`
-  setting, which should be a list of strings.
+  setting, which should be a list of compiled regular expression objects.
 
 * Performs URL rewriting based on the :setting:`APPEND_SLASH` and
   :setting:`PREPEND_WWW` settings.
@@ -90,6 +90,20 @@
 
 .. class:: GZipMiddleware
 
+.. warning::
+
+    Security researchers recently revealed that when compression techniques
+    (including ``GZipMiddleware``) are used on a website, the site becomes
+    exposed to a number of possible attacks. These approaches can be used to
+    compromise, amongst other things, Django's CSRF protection. Before using
+    ``GZipMiddleware`` on your site, you should consider very carefully whether
+    you are subject to these attacks. If you're in *any* doubt about whether
+    you're affected, you should avoid using ``GZipMiddleware``. For more
+    details, see `the BREACH paper (PDF)`_ and `breachattack.com`_.
+
+    .. _the BREACH paper (PDF): http://breachattack.com/resources/BREACH%20-%20SSL,%20gone%20in%2030%20seconds.pdf
+    .. _breachattack.com: http://breachattack.com
+
 Compresses content for browsers that understand GZip compression (all modern
 browsers).
 
diff --git a/lib/django-1.4/docs/ref/models/fields.txt b/lib/django-1.4/docs/ref/models/fields.txt
index b186a46..cb359fb 100644
--- a/lib/django-1.4/docs/ref/models/fields.txt
+++ b/lib/django-1.4/docs/ref/models/fields.txt
@@ -252,8 +252,8 @@
 
 If ``True``, this field is the primary key for the model.
 
-If you don't specify ``primary_key=True`` for any fields in your model, Django
-will automatically add an :class:`IntegerField` to hold the primary key, so you
+If you don't specify ``primary_key=True`` for any field in your model, Django
+will automatically add an :class:`AutoField` to hold the primary key, so you
 don't need to set ``primary_key=True`` on any of your fields unless you want to
 override the default primary-key behavior. For more, see
 :ref:`automatic-primary-key-fields`.
diff --git a/lib/django-1.4/docs/ref/models/instances.txt b/lib/django-1.4/docs/ref/models/instances.txt
index 6db0963..1d89321 100644
--- a/lib/django-1.4/docs/ref/models/instances.txt
+++ b/lib/django-1.4/docs/ref/models/instances.txt
@@ -48,7 +48,7 @@
     2. Add a method on a custom manager (usually preferred)::
 
         class BookManager(models.Manager):
-            def create_book(title):
+            def create_book(self, title):
                 book = self.create(title=title)
                 # do something with the book
                 return book
diff --git a/lib/django-1.4/docs/ref/models/options.txt b/lib/django-1.4/docs/ref/models/options.txt
index 9d076f6..9767ea5 100644
--- a/lib/django-1.4/docs/ref/models/options.txt
+++ b/lib/django-1.4/docs/ref/models/options.txt
@@ -67,6 +67,18 @@
     the table name via ``db_table``, particularly if you are using the MySQL
     backend. See the :ref:`MySQL notes <mysql-notes>` for more details.
 
+.. admonition:: Table name quoting for Oracle
+
+   In order to meet the 30-char limitation Oracle has on table names,
+   and match the usual conventions for Oracle databases, Django may shorten
+   table names and turn them all-uppercase. To prevent such transformations,
+   use a quoted name as the value for ``db_table``::
+
+       db_table = '"name_left_in_lowercase"'
+
+   Such quoted names can also be used with Django's other supported database 
+   backends; except for Oracle, however, the quotes have no effect. See the
+   :ref:`Oracle notes <oracle-notes>` for more details.
 
 ``db_tablespace``
 -----------------
diff --git a/lib/django-1.4/docs/ref/models/querysets.txt b/lib/django-1.4/docs/ref/models/querysets.txt
index a32c9f5..2decddb 100644
--- a/lib/django-1.4/docs/ref/models/querysets.txt
+++ b/lib/django-1.4/docs/ref/models/querysets.txt
@@ -1041,6 +1041,16 @@
 
       Entry.objects.extra(where=['headline=%s'], params=['Lennon'])
 
+.. warning::
+
+    If you are performing queries on MySQL, note that MySQL's silent type coercion
+    may cause unexpected results when mixing types. If you query on a string
+    type column, but with an integer value, MySQL will coerce the types of all values
+    in the table to an integer before performing the comparison. For example, if your
+    table contains the values ``'abc'``, ``'def'`` and you query for ``WHERE mycolumn=0``,
+    both rows will match. To prevent this, perform the correct typecasting
+    before using the value in a query.
+
 defer
 ~~~~~
 
@@ -1442,7 +1452,7 @@
 contrast, ``iterator()`` will read results directly, without doing any caching
 at the ``QuerySet`` level (internally, the default iterator calls ``iterator()``
 and caches the return value). For a ``QuerySet`` which returns a large number of
-objects that you only need to access once, this can results in better
+objects that you only need to access once, this can result in better
 performance and a significant reduction in memory.
 
 Note that using ``iterator()`` on a ``QuerySet`` which has already been
diff --git a/lib/django-1.4/docs/ref/request-response.txt b/lib/django-1.4/docs/ref/request-response.txt
index d435822..e3cc4de 100644
--- a/lib/django-1.4/docs/ref/request-response.txt
+++ b/lib/django-1.4/docs/ref/request-response.txt
@@ -492,6 +492,26 @@
         >>> q.lists()
         [(u'a', [u'1', u'2', u'3'])]
 
+.. method:: QueryDict.pop(key)
+
+    Returns a list of values for the given key and removes them from the
+    dictionary. Raises ``KeyError`` if the key does not exist. For example::
+
+        >>> q = QueryDict('a=1&a=2&a=3', mutable=True)
+        >>> q.pop('a')
+        [u'1', u'2', u'3']
+
+.. method:: QueryDict.popitem()
+
+    Removes an arbitrary member of the dictionary (since there's no concept
+    of ordering), and returns a two value tuple containing the key and a list
+    of all values for the key. Raises ``KeyError`` when called on an empty
+    dictionary. For example::
+
+        >>> q = QueryDict('a=1&a=2&a=3', mutable=True)
+        >>> q.popitem()
+        (u'a', [u'1', u'2', u'3'])
+
 .. method:: QueryDict.dict()
 
     .. versionadded:: 1.4
diff --git a/lib/django-1.4/docs/ref/settings.txt b/lib/django-1.4/docs/ref/settings.txt
index 3466482..43aa9b2 100644
--- a/lib/django-1.4/docs/ref/settings.txt
+++ b/lib/django-1.4/docs/ref/settings.txt
@@ -68,6 +68,47 @@
 Note that Django will email *all* of these people whenever an error happens.
 See :doc:`/howto/error-reporting` for more information.
 
+.. setting:: ALLOWED_HOSTS
+
+ALLOWED_HOSTS
+-------------
+
+Default: ``['*']``
+
+A list of strings representing the host/domain names that this Django site can
+serve. This is a security measure to prevent an attacker from poisoning caches
+and password reset emails with links to malicious hosts by submitting requests
+with a fake HTTP ``Host`` header, which is possible even under many
+seemingly-safe webserver configurations.
+
+Values in this list can be fully qualified names (e.g. ``'www.example.com'``),
+in which case they will be matched against the request's ``Host`` header
+exactly (case-insensitive, not including port). A value beginning with a period
+can be used as a subdomain wildcard: ``'.example.com'`` will match
+``example.com``, ``www.example.com``, and any other subdomain of
+``example.com``. A value of ``'*'`` will match anything; in this case you are
+responsible for providing your own validation of the ``Host`` header (perhaps in a
+middleware; if so this middleware must be listed first in
+:setting:`MIDDLEWARE_CLASSES`).
+
+If the ``Host`` header (or ``X-Forwarded-Host`` if
+:setting:`USE_X_FORWARDED_HOST` is enabled) does not match any value in this
+list, the :meth:`django.http.HttpRequest.get_host()` method will raise
+:exc:`~django.core.exceptions.SuspiciousOperation`.
+
+When :setting:`DEBUG` is ``True`` or when running tests, host validation is
+disabled; any host will be accepted. Thus it's usually only necessary to set it
+in production.
+
+This validation only applies via :meth:`~django.http.HttpRequest.get_host()`;
+if your code accesses the ``Host`` header directly from ``request.META`` you
+are bypassing this security protection.
+
+The default value of this setting in Django 1.4.4+ is ``['*']`` (accept any
+host) in order to avoid breaking backwards-compatibility in a security update,
+but in Django 1.5+ the default is ``[]`` and explicitly configuring this
+setting is required.
+
 .. setting:: ALLOWED_INCLUDE_ROOTS
 
 ALLOWED_INCLUDE_ROOTS
@@ -1522,6 +1563,25 @@
 See also :setting:`DECIMAL_SEPARATOR`, :setting:`THOUSAND_SEPARATOR` and
 :setting:`USE_THOUSAND_SEPARATOR`.
 
+.. setting:: PASSWORD_HASHERS
+
+PASSWORD_HASHERS
+----------------
+
+.. versionadded:: 1.4
+
+See :ref:`auth_password_storage`.
+
+Default::
+
+    ('django.contrib.auth.hashers.PBKDF2PasswordHasher',
+     'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
+     'django.contrib.auth.hashers.BCryptPasswordHasher',
+     'django.contrib.auth.hashers.SHA1PasswordHasher',
+     'django.contrib.auth.hashers.MD5PasswordHasher',
+     'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
+     'django.contrib.auth.hashers.CryptPasswordHasher',)
+
 .. setting:: PASSWORD_RESET_TIMEOUT_DAYS
 
 PASSWORD_RESET_TIMEOUT_DAYS
diff --git a/lib/django-1.4/docs/ref/templates/builtins.txt b/lib/django-1.4/docs/ref/templates/builtins.txt
index e547cbc..e2734c9 100644
--- a/lib/django-1.4/docs/ref/templates/builtins.txt
+++ b/lib/django-1.4/docs/ref/templates/builtins.txt
@@ -851,6 +851,8 @@
     * New York: 20,000,000
 * India
     * Calcutta: 15,000,000
+* USA
+    * Chicago: 7,000,000
 * Japan
     * Tokyo: 33,000,000
 
@@ -1986,7 +1988,7 @@
 striptags
 ^^^^^^^^^
 
-Strips all [X]HTML tags.
+Makes all possible efforts to strip all [X]HTML tags.
 
 For example::
 
@@ -1995,6 +1997,16 @@
 If ``value`` is ``"<b>Joel</b> <button>is</button> a <span>slug</span>"``, the
 output will be ``"Joel is a slug"``.
 
+.. admonition:: No safety guarantee
+
+    Note that ``striptags`` doesn't give any guarantee about its output being
+    entirely HTML safe, particularly with invalid HTML input. So **NEVER**
+    apply the ``safe`` filter to a ``striptags`` output.
+    If you are looking for something more robust, you can use the ``bleach``
+    Python library, notably its `clean`_ method.
+
+.. _clean: http://bleach.readthedocs.org/en/latest/clean.html
+
 .. templatefilter:: time
 
 time
diff --git a/lib/django-1.4/docs/ref/unicode.txt b/lib/django-1.4/docs/ref/unicode.txt
index 1286dcf..4d644cd 100644
--- a/lib/django-1.4/docs/ref/unicode.txt
+++ b/lib/django-1.4/docs/ref/unicode.txt
@@ -17,7 +17,7 @@
 a more restrictive encoding -- for example, latin1 (iso8859-1) -- you won't be
 able to store certain characters in the database, and information will be lost.
 
-* MySQL users, refer to the `MySQL manual`_ (section 9.1.3.2 for MySQL 5.1)
+* MySQL users, refer to the `MySQL manual`_ (section 10.1.3.2 for MySQL 5.1)
   for details on how to set or alter the database character set encoding.
 
 * PostgreSQL users, refer to the `PostgreSQL manual`_ (section 21.2.2 in
diff --git a/lib/django-1.4/docs/ref/utils.txt b/lib/django-1.4/docs/ref/utils.txt
index dd86b3a..9dd0739 100644
--- a/lib/django-1.4/docs/ref/utils.txt
+++ b/lib/django-1.4/docs/ref/utils.txt
@@ -376,7 +376,7 @@
 
 .. class:: Atom1Feed(SyndicationFeed)
 
-    Spec: http://www.atomenabled.org/developers/syndication/atom-format-spec.php
+    Spec: http://tools.ietf.org/html/rfc4287
 
 ``django.utils.functional``
 ===========================
diff --git a/lib/django-1.4/docs/releases/1.3.3.txt b/lib/django-1.4/docs/releases/1.3.3.txt
new file mode 100644
index 0000000..437cbfb
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.3.3.txt
@@ -0,0 +1,11 @@
+==========================
+Django 1.3.3 release notes
+==========================
+
+*August 1, 2012*
+
+Following Monday's security release of :doc:`Django 1.3.2 </releases/1.3.2>`,
+we began receiving reports that one of the fixes applied was breaking Python
+2.4 compatibility for Django 1.3. Since Python 2.4 is a supported Python
+version for that release series, this release fixes compatibility with
+Python 2.4.
diff --git a/lib/django-1.4/docs/releases/1.3.4.txt b/lib/django-1.4/docs/releases/1.3.4.txt
new file mode 100644
index 0000000..3a174b3
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.3.4.txt
@@ -0,0 +1,37 @@
+==========================
+Django 1.3.4 release notes
+==========================
+
+*October 17, 2012*
+
+This is the fourth release in the Django 1.3 series.
+
+Host header poisoning
+---------------------
+
+Some parts of Django -- independent of end-user-written applications -- make
+use of full URLs, including domain name, which are generated from the HTTP Host
+header. Some attacks against this are beyond Django's ability to control, and
+require the web server to be properly configured; Django's documentation has
+for some time contained notes advising users on such configuration.
+
+Django's own built-in parsing of the Host header is, however, still vulnerable,
+as was reported to us recently. The Host header parsing in Django 1.3.3 and
+Django 1.4.1 -- specifically, ``django.http.HttpRequest.get_host()`` -- was
+incorrectly handling username/password information in the header. Thus, for
+example, the following Host header would be accepted by Django when running on
+"validsite.com"::
+
+    Host: validsite.com:random@evilsite.com
+
+Using this, an attacker can cause parts of Django -- particularly the
+password-reset mechanism -- to generate and display arbitrary URLs to users.
+
+To remedy this, the parsing in ``HttpRequest.get_host()`` is being modified;
+Host headers which contain potentially dangerous content (such as
+username/password pairs) now raise the exception
+:exc:`django.core.exceptions.SuspiciousOperation`.
+
+Details of this issue were initially posted online as a `security advisory`_.
+
+.. _security advisory: https://www.djangoproject.com/weblog/2012/oct/17/security/
diff --git a/lib/django-1.4/docs/releases/1.3.5.txt b/lib/django-1.4/docs/releases/1.3.5.txt
new file mode 100644
index 0000000..65c4032
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.3.5.txt
@@ -0,0 +1,60 @@
+==========================
+Django 1.3.5 release notes
+==========================
+
+*December 10, 2012*
+
+Django 1.3.5 addresses two security issues present in previous Django releases
+in the 1.3 series.
+
+Please be aware that this security release is slightly different from previous
+ones. Both issues addressed here have been dealt with in prior security updates
+to Django. In one case, we have received ongoing reports of problems, and in
+the other we've chosen to take further steps to tighten up Django's code in
+response to independent discovery of potential problems from multiple sources.
+
+Host header poisoning
+---------------------
+
+Several earlier Django security releases focused on the issue of poisoning the
+HTTP Host header, causing Django to generate URLs pointing to arbitrary,
+potentially-malicious domains.
+
+In response to further input received and reports of continuing issues
+following the previous release, we're taking additional steps to tighten Host
+header validation. Rather than attempt to accommodate all features HTTP
+supports here, Django's Host header validation attempts to support a smaller,
+but far more common, subset:
+
+* Hostnames must consist of characters [A-Za-z0-9] plus hyphen ('-') or dot
+  ('.').
+* IP addresses -- both IPv4 and IPv6 -- are permitted.
+* Port, if specified, is numeric.
+
+Any deviation from this will now be rejected, raising the exception
+:exc:`django.core.exceptions.SuspiciousOperation`.
+
+Redirect poisoning
+------------------
+
+Also following up on a previous issue: in July of this year, we made changes to
+Django's HTTP redirect classes, performing additional validation of the scheme
+of the URL to redirect to (since, both within Django's own supplied
+applications and many third-party applications, accepting a user-supplied
+redirect target is a common pattern).
+
+Since then, two independent audits of the code turned up further potential
+problems. So, similar to the Host-header issue, we are taking steps to provide
+tighter validation in response to reported problems (primarily with third-party
+applications, but to a certain extent also within Django itself). This comes in
+two parts:
+
+1. A new utility function, ``django.utils.http.is_safe_url``, is added; this
+function takes a URL and a hostname, and checks that the URL is either
+relative, or if absolute matches the supplied hostname. This function is
+intended for use whenever user-supplied redirect targets are accepted, to
+ensure that such redirects cannot lead to arbitrary third-party sites.
+
+2. All of Django's own built-in views -- primarily in the authentication system
+-- which allow user-supplied redirect targets now use ``is_safe_url`` to
+validate the supplied URL.
diff --git a/lib/django-1.4/docs/releases/1.3.6.txt b/lib/django-1.4/docs/releases/1.3.6.txt
new file mode 100644
index 0000000..d55199a
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.3.6.txt
@@ -0,0 +1,78 @@
+==========================
+Django 1.3.6 release notes
+==========================
+
+*February 19, 2013*
+
+Django 1.3.6 fixes four security issues present in previous Django releases in
+the 1.3 series.
+
+This is the sixth bugfix/security release in the Django 1.3 series.
+
+
+Host header poisoning
+---------------------
+
+Some parts of Django -- independent of end-user-written applications -- make
+use of full URLs, including domain name, which are generated from the HTTP Host
+header. Django's documentation has for some time contained notes advising users
+on how to configure webservers to ensure that only valid Host headers can reach
+the Django application. However, it has been reported to us that even with the
+recommended webserver configurations there are still techniques available for
+tricking many common webservers into supplying the application with an
+incorrect and possibly malicious Host header.
+
+For this reason, Django 1.3.6 adds a new setting, ``ALLOWED_HOSTS``, which
+should contain an explicit list of valid host/domain names for this site. A
+request with a Host header not matching an entry in this list will raise
+``SuspiciousOperation`` if ``request.get_host()`` is called. For full details
+see the documentation for the :setting:`ALLOWED_HOSTS` setting.
+
+The default value for this setting in Django 1.3.6 is ``['*']`` (matching any
+host), for backwards-compatibility, but we strongly encourage all sites to set
+a more restrictive value.
+
+This host validation is disabled when ``DEBUG`` is ``True`` or when running tests.
+
+
+XML deserialization
+-------------------
+
+The XML parser in the Python standard library is vulnerable to a number of
+attacks via external entities and entity expansion. Django uses this parser for
+deserializing XML-formatted database fixtures. The fixture deserializer is not
+intended for use with untrusted data, but in order to err on the side of safety
+in Django 1.3.6 the XML deserializer refuses to parse an XML document with a
+DTD (DOCTYPE definition), which closes off these attack avenues.
+
+These issues in the Python standard library are CVE-2013-1664 and
+CVE-2013-1665. More information available `from the Python security team`_.
+
+Django's XML serializer does not create documents with a DTD, so this should
+not cause any issues with the typical round-trip from ``dumpdata`` to
+``loaddata``, but if you feed your own XML documents to the ``loaddata``
+management command, you will need to ensure they do not contain a DTD.
+
+.. _from the Python security team: http://blog.python.org/2013/02/announcing-defusedxml-fixes-for-xml.html
+
+
+Formset memory exhaustion
+-------------------------
+
+Previous versions of Django did not validate or limit the form-count data
+provided by the client in a formset's management form, making it possible to
+exhaust a server's available memory by forcing it to create very large numbers
+of forms.
+
+In Django 1.3.6, all formsets have a strictly-enforced maximum number of forms
+(1000 by default, though it can be set higher via the ``max_num`` formset
+factory argument).
+
+
+Admin history view information leakage
+--------------------------------------
+
+In previous versions of Django, an admin user without change permission on a
+model could still view the unicode representation of instances via their admin
+history log. Django 1.3.6 now limits the admin history log view for an object
+to users with change permission for that model.
diff --git a/lib/django-1.4/docs/releases/1.3.7.txt b/lib/django-1.4/docs/releases/1.3.7.txt
new file mode 100644
index 0000000..3cccfcf
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.3.7.txt
@@ -0,0 +1,13 @@
+==========================
+Django 1.3.7 release notes
+==========================
+
+*February 20, 2013*
+
+Django 1.3.7 corrects a packaging problem with yesterday's :doc:`1.3.6 release
+</releases/1.3.6>`.
+
+The release contained stray ``.pyc`` files that caused "bad magic number"
+errors when running with some versions of Python. This release corrects this,
+and also fixes a bad documentation link in the project template ``settings.py``
+file generated by ``manage.py startproject``.
diff --git a/lib/django-1.4/docs/releases/1.3.txt b/lib/django-1.4/docs/releases/1.3.txt
index 3dc90af..8ac6d14 100644
--- a/lib/django-1.4/docs/releases/1.3.txt
+++ b/lib/django-1.4/docs/releases/1.3.txt
@@ -653,6 +653,15 @@
 and reset their password. In Django 1.3 inactive users will receive the same
 message as a nonexistent account.
 
+Password reset view now accepts ``from_email``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The :func:`django.contrib.auth.views.password_reset` view now accepts a
+``from_email`` parameter, which is passed to the ``password_reset_form``'s
+``save()`` method as a keyword argument. If you are using this view with a
+custom password reset form, then you will need to ensure your form's ``save()``
+method accepts this keyword argument.
+
 .. _deprecated-features-1.3:
 
 Features deprecated in 1.3
diff --git a/lib/django-1.4/docs/releases/1.4-alpha-1.txt b/lib/django-1.4/docs/releases/1.4-alpha-1.txt
index b5ec782..5f78644 100644
--- a/lib/django-1.4/docs/releases/1.4-alpha-1.txt
+++ b/lib/django-1.4/docs/releases/1.4-alpha-1.txt
@@ -337,9 +337,10 @@
 Error report filtering
 ~~~~~~~~~~~~~~~~~~~~~~
 
-Two new function decorators, :func:`sensitive_variables` and
-:func:`sensitive_post_parameters`, were added to allow designating the
-local variables and POST parameters which may contain sensitive
+We added two function decorators,
+:func:`~django.views.decorators.debug.sensitive_variables` and
+:func:`~django.views.decorators.debug.sensitive_post_parameters`, to allow
+designating the local variables and POST parameters that may contain sensitive
 information and should be filtered out of error reports.
 
 All POST parameters are now systematically filtered out of error reports for
diff --git a/lib/django-1.4/docs/releases/1.4-beta-1.txt b/lib/django-1.4/docs/releases/1.4-beta-1.txt
index 88f32ea..ee2c2e9 100644
--- a/lib/django-1.4/docs/releases/1.4-beta-1.txt
+++ b/lib/django-1.4/docs/releases/1.4-beta-1.txt
@@ -375,9 +375,10 @@
 Error report filtering
 ~~~~~~~~~~~~~~~~~~~~~~
 
-Two new function decorators, :func:`sensitive_variables` and
-:func:`sensitive_post_parameters`, were added to allow designating the
-local variables and POST parameters which may contain sensitive
+We added two function decorators,
+:func:`~django.views.decorators.debug.sensitive_variables` and
+:func:`~django.views.decorators.debug.sensitive_post_parameters`, to allow
+designating the local variables and POST parameters that may contain sensitive
 information and should be filtered out of error reports.
 
 All POST parameters are now systematically filtered out of error reports for
diff --git a/lib/django-1.4/docs/releases/1.4.10.txt b/lib/django-1.4/docs/releases/1.4.10.txt
new file mode 100644
index 0000000..97ac139
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.4.10.txt
@@ -0,0 +1,14 @@
+============================
+ Django 1.4.10 release notes
+============================
+
+*November 6, 2013*
+
+Django 1.4.10 fixes a Python-compatibility bug in the 1.4 series.
+
+Python compatibility
+--------------------
+
+Django 1.4.9 inadvertently introduced issues with Python 2.5 compatibility.
+Django 1.4.10 restores Python 2.5 compatibility. This was issue #21362 in
+Django's Trac.
diff --git a/lib/django-1.4/docs/releases/1.4.11.txt b/lib/django-1.4/docs/releases/1.4.11.txt
new file mode 100644
index 0000000..2419454
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.4.11.txt
@@ -0,0 +1,110 @@
+===========================
+Django 1.4.11 release notes
+===========================
+
+*April 21, 2014*
+
+Django 1.4.11 fixes three security issues in 1.4.10. Additionally,
+Django's vendored version of six, :mod:`django.utils.six`, has been
+upgraded to the latest release (1.6.1).
+
+Unexpected code execution using ``reverse()``
+=============================================
+
+Django's URL handling is based on a mapping of regex patterns
+(representing the URLs) to callable views, and Django's own processing
+consists of matching a requested URL against those patterns to
+determine the appropriate view to invoke.
+
+Django also provides a convenience function --
+:func:`~django.core.urlresolvers.reverse` -- which performs this process
+in the opposite direction. The ``reverse()`` function takes
+information about a view and returns a URL which would invoke that
+view. Use of ``reverse()`` is encouraged for application developers,
+as the output of ``reverse()`` is always based on the current URL
+patterns, meaning developers do not need to change other code when
+making changes to URLs.
+
+One argument signature for ``reverse()`` is to pass a dotted Python
+path to the desired view. In this situation, Django will import the
+module indicated by that dotted path as part of generating the
+resulting URL. If such a module has import-time side effects, those
+side effects will occur.
+
+Thus it is possible for an attacker to cause unexpected code
+execution, given the following conditions:
+
+1. One or more views are present which construct a URL based on user
+   input (commonly, a "next" parameter in a querystring indicating
+   where to redirect upon successful completion of an action).
+
+2. One or more modules are known to an attacker to exist on the
+   server's Python import path, which perform code execution with side
+   effects on importing.
+
+To remedy this, ``reverse()`` will now only accept and import dotted
+paths based on the view-containing modules listed in the project's :doc:`URL
+pattern configuration </topics/http/urls>`, so as to ensure that only modules
+the developer intended to be imported in this fashion can or will be imported.
+
+Caching of anonymous pages could reveal CSRF token
+==================================================
+
+Django includes both a :doc:`caching framework </topics/cache>` and a system
+for :doc:`preventing cross-site request forgery (CSRF) attacks
+</ref/contrib/csrf/>`. The CSRF-protection system is based on a random nonce
+sent to the client in a cookie which must be sent by the client on future
+requests and, in forms, a hidden value which must be submitted back with the
+form.
+
+The caching framework includes an option to cache responses to
+anonymous (i.e., unauthenticated) clients.
+
+When the first anonymous request to a given page is by a client which
+did not have a CSRF cookie, the cache framework will also cache the
+CSRF cookie and serve the same nonce to other anonymous clients who
+do not have a CSRF cookie. This can allow an attacker to obtain a
+valid CSRF cookie value and perform attacks which bypass the check for
+the cookie.
+
+To remedy this, the caching framework will no longer cache such
+responses. The heuristic for this will be:
+
+1. If the incoming request did not submit any cookies, and
+
+2. If the response did send one or more cookies, and
+
+3. If the ``Vary: Cookie`` header is set on the response, then the
+   response will not be cached.
+
+MySQL typecasting
+=================
+
+The MySQL database is known to "typecast" on certain queries; for
+example, when querying a table which contains string values, but using
+a query which filters based on an integer value, MySQL will first
+silently coerce the strings to integers and return a result based on that.
+
+If a query is performed without first converting values to the
+appropriate type, this can produce unexpected results, similar to what
+would occur if the query itself had been manipulated.
+
+Django's model field classes are aware of their own types and most
+such classes perform explicit conversion of query arguments to the
+correct database-level type before querying. However, three model
+field classes did not correctly convert their arguments:
+
+* :class:`~django.db.models.FilePathField`
+* :class:`~django.db.models.GenericIPAddressField`
+* :class:`~django.db.models.IPAddressField`
+
+These three fields have been updated to convert their arguments to the
+correct types before querying.
+
+Additionally, developers of custom model fields are now warned via
+documentation to ensure their custom field classes will perform
+appropriate type conversions, and users of the :meth:`raw()
+<django.db.models.query.QuerySet.raw>` and :meth:`extra()
+<django.db.models.query.QuerySet.extra>` query methods -- which allow the
+developer to supply raw SQL or SQL fragments -- will be advised to ensure they
+perform appropriate manual type conversions prior to executing queries.
diff --git a/lib/django-1.4/docs/releases/1.4.12.txt b/lib/django-1.4/docs/releases/1.4.12.txt
new file mode 100644
index 0000000..41752a7
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.4.12.txt
@@ -0,0 +1,14 @@
+===========================
+Django 1.4.12 release notes
+===========================
+
+*April 28, 2014*
+
+Django 1.4.12 fixes a regression in the 1.4.11 security release.
+
+Bugfixes
+========
+
+* Restored the ability to :func:`~django.core.urlresolvers.reverse` views
+  created using :func:`functools.partial()`
+  (`#22486 <http://code.djangoproject.com/ticket/22486>`_)
diff --git a/lib/django-1.4/docs/releases/1.4.13.txt b/lib/django-1.4/docs/releases/1.4.13.txt
new file mode 100644
index 0000000..bcbe460
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.4.13.txt
@@ -0,0 +1,47 @@
+===========================
++Django 1.4.13 release notes
++===========================
+
+*May 13, 2014*
+
+Django 1.4.13 fixes two security issues in 1.4.12.
+
+
+Caches may incorrectly be allowed to store and serve private data
+=================================================================
+In certain situations, Django may allow caches to store private data
+related to a particular session and then serve that data to requests
+with a different session, or no session at all. This can both lead to
+information disclosure, and can be a vector for cache poisoning.
+
+When using Django sessions, Django will set a ``Vary: Cookie`` header to
+ensure caches do not serve cached data to requests from other sessions.
+However, older versions of Internet Explorer (most likely only Internet
+Explorer 6, and Internet Explorer 7 if run on Windows XP or Windows Server
+2003) are unable to handle the ``Vary`` header in combination with many content
+types. Therefore, Django would remove the header if the request was made by
+Internet Explorer.
+
+To remedy this, the special behaviour for these older Internet Explorer versions
+has been removed, and the ``Vary`` header is no longer stripped from the response.
+In addition, modifications to the ``Cache-Control`` header for all Internet Explorer
+requests with a ``Content-Disposition`` header, have also been removed as they
+were found to have similar issues.
+
+
+Malformed redirect URLs from user input not correctly validated
+===============================================================
+The validation for redirects did not correctly validate some malformed URLs,
+which are accepted by some browsers. This allows a user to be redirected to
+an unsafe URL unexpectedly.
+
+Django relies on user input in some cases (e.g.
+:func:`django.contrib.auth.views.login`, ``django.contrib.comments``, and
+:doc:`i18n </topics/i18n/index>`) to redirect the user to an "on success" URL.
+The security checks for these redirects (namely
+``django.utils.http.is_safe_url()``) did not correctly validate some malformed
++URLs, such as ``http:\\\\\\djangoproject.com``, which are accepted by some browsers
+with more liberal URL parsing.
+
+To remedy this, the validation in ``is_safe_url()`` has been tightened to be able
+to handle and correctly validate these malformed URLs.
diff --git a/lib/django-1.4/docs/releases/1.4.2.txt b/lib/django-1.4/docs/releases/1.4.2.txt
index 07eec39..a6150f5 100644
--- a/lib/django-1.4/docs/releases/1.4.2.txt
+++ b/lib/django-1.4/docs/releases/1.4.2.txt
@@ -17,7 +17,7 @@
 
 Django's own built-in parsing of the Host header is, however, still vulnerable,
 as was reported to us recently. The Host header parsing in Django 1.3.3 and
-Django 1.4.1 -- specifically, django.http.HttpRequest.get_host() -- was
+Django 1.4.1 -- specifically, ``django.http.HttpRequest.get_host()`` -- was
 incorrectly handling username/password information in the header. Thus, for
 example, the following Host header would be accepted by Django when running on
 "validsite.com"::
@@ -27,9 +27,10 @@
 Using this, an attacker can cause parts of Django -- particularly the
 password-reset mechanism -- to generate and display arbitrary URLs to users.
 
-To remedy this, the parsing in HttpRequest.get_host() is being modified; Host
-headers which contain potentially dangerous content (such as username/password
-pairs) now raise the exception django.core.exceptions.SuspiciousOperation
+To remedy this, the parsing in ``HttpRequest.get_host()`` is being modified;
+Host headers which contain potentially dangerous content (such as
+username/password pairs) now raise the exception
+:exc:`django.core.exceptions.SuspiciousOperation`.
 
 Details of this issue were initially posted online as a `security advisory`_.
 
diff --git a/lib/django-1.4/docs/releases/1.4.3.txt b/lib/django-1.4/docs/releases/1.4.3.txt
new file mode 100644
index 0000000..aadf623
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.4.3.txt
@@ -0,0 +1,60 @@
+==========================
+Django 1.4.3 release notes
+==========================
+
+*December 10, 2012*
+
+Django 1.4.3 addresses two security issues present in previous Django releases
+in the 1.4 series.
+
+Please be aware that this security release is slightly different from previous
+ones. Both issues addressed here have been dealt with in prior security updates
+to Django. In one case, we have received ongoing reports of problems, and in
+the other we've chosen to take further steps to tighten up Django's code in
+response to independent discovery of potential problems from multiple sources.
+
+Host header poisoning
+---------------------
+
+Several earlier Django security releases focused on the issue of poisoning the
+HTTP Host header, causing Django to generate URLs pointing to arbitrary,
+potentially-malicious domains.
+
+In response to further input received and reports of continuing issues
+following the previous release, we're taking additional steps to tighten Host
+header validation. Rather than attempt to accommodate all features HTTP
+supports here, Django's Host header validation attempts to support a smaller,
+but far more common, subset:
+
+* Hostnames must consist of characters [A-Za-z0-9] plus hyphen ('-') or dot
+  ('.').
+* IP addresses -- both IPv4 and IPv6 -- are permitted.
+* Port, if specified, is numeric.
+
+Any deviation from this will now be rejected, raising the exception
+:exc:`django.core.exceptions.SuspiciousOperation`.
+
+Redirect poisoning
+------------------
+
+Also following up on a previous issue: in July of this year, we made changes to
+Django's HTTP redirect classes, performing additional validation of the scheme
+of the URL to redirect to (since, both within Django's own supplied
+applications and many third-party applications, accepting a user-supplied
+redirect target is a common pattern).
+
+Since then, two independent audits of the code turned up further potential
+problems. So, similar to the Host-header issue, we are taking steps to provide
+tighter validation in response to reported problems (primarily with third-party
+applications, but to a certain extent also within Django itself). This comes in
+two parts:
+
+1. A new utility function, ``django.utils.http.is_safe_url``, is added; this
+function takes a URL and a hostname, and checks that the URL is either
+relative, or if absolute matches the supplied hostname. This function is
+intended for use whenever user-supplied redirect targets are accepted, to
+ensure that such redirects cannot lead to arbitrary third-party sites.
+
+2. All of Django's own built-in views -- primarily in the authentication system
+-- which allow user-supplied redirect targets now use ``is_safe_url`` to
+validate the supplied URL.
diff --git a/lib/django-1.4/docs/releases/1.4.4.txt b/lib/django-1.4/docs/releases/1.4.4.txt
new file mode 100644
index 0000000..c5fcbc3
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.4.4.txt
@@ -0,0 +1,88 @@
+==========================
+Django 1.4.4 release notes
+==========================
+
+*February 19, 2013*
+
+Django 1.4.4 fixes four security issues present in previous Django releases in
+the 1.4 series, as well as several other bugs and numerous documentation
+improvements.
+
+This is the fourth bugfix/security release in the Django 1.4 series.
+
+
+Host header poisoning
+---------------------
+
+Some parts of Django -- independent of end-user-written applications -- make
+use of full URLs, including domain name, which are generated from the HTTP Host
+header. Django's documentation has for some time contained notes advising users
+on how to configure webservers to ensure that only valid Host headers can reach
+the Django application. However, it has been reported to us that even with the
+recommended webserver configurations there are still techniques available for
+tricking many common webservers into supplying the application with an
+incorrect and possibly malicious Host header.
+
+For this reason, Django 1.4.4 adds a new setting, ``ALLOWED_HOSTS``, containing
+an explicit list of valid host/domain names for this site. A request with a
+Host header not matching an entry in this list will raise
+``SuspiciousOperation`` if ``request.get_host()`` is called. For full details
+see the documentation for the :setting:`ALLOWED_HOSTS` setting.
+
+The default value for this setting in Django 1.4.4 is ``['*']`` (matching any
+host), for backwards-compatibility, but we strongly encourage all sites to set
+a more restrictive value.
+
+This host validation is disabled when ``DEBUG`` is ``True`` or when running tests.
+
+
+XML deserialization
+-------------------
+
+The XML parser in the Python standard library is vulnerable to a number of
+attacks via external entities and entity expansion. Django uses this parser for
+deserializing XML-formatted database fixtures. This deserializer is not
+intended for use with untrusted data, but in order to err on the side of safety
+in Django 1.4.4 the XML deserializer refuses to parse an XML document with a
+DTD (DOCTYPE definition), which closes off these attack avenues.
+
+These issues in the Python standard library are CVE-2013-1664 and
+CVE-2013-1665. More information available `from the Python security team`_.
+
+Django's XML serializer does not create documents with a DTD, so this should
+not cause any issues with the typical round-trip from ``dumpdata`` to
+``loaddata``, but if you feed your own XML documents to the ``loaddata``
+management command, you will need to ensure they do not contain a DTD.
+
+.. _from the Python security team: http://blog.python.org/2013/02/announcing-defusedxml-fixes-for-xml.html
+
+
+Formset memory exhaustion
+-------------------------
+
+Previous versions of Django did not validate or limit the form-count data
+provided by the client in a formset's management form, making it possible to
+exhaust a server's available memory by forcing it to create very large numbers
+of forms.
+
+In Django 1.4.4, all formsets have a strictly-enforced maximum number of forms
+(1000 by default, though it can be set higher via the ``max_num`` formset
+factory argument).
+
+
+Admin history view information leakage
+--------------------------------------
+
+In previous versions of Django, an admin user without change permission on a
+model could still view the unicode representation of instances via their admin
+history log. Django 1.4.4 now limits the admin history log view for an object
+to users with change permission for that model.
+
+
+Other bugfixes and changes
+==========================
+
+* Prevented transaction state from leaking from one request to the next (#19707).
+* Changed a SQL command syntax to be MySQL 4 compatible (#19702).
+* Added backwards-compatibility with old unsalted MD5 passwords (#18144).
+* Numerous documentation improvements and fixes.
diff --git a/lib/django-1.4/docs/releases/1.4.5.txt b/lib/django-1.4/docs/releases/1.4.5.txt
new file mode 100644
index 0000000..9ba5235
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.4.5.txt
@@ -0,0 +1,13 @@
+==========================
+Django 1.4.5 release notes
+==========================
+
+*February 20, 2013*
+
+Django 1.4.5 corrects a packaging problem with yesterday's :doc:`1.4.4 release
+</releases/1.4.4>`.
+
+The release contained stray ``.pyc`` files that caused "bad magic number"
+errors when running with some versions of Python. This release corrects this,
+and also fixes a bad documentation link in the project template ``settings.py``
+file generated by ``manage.py startproject``.
diff --git a/lib/django-1.4/docs/releases/1.4.6.txt b/lib/django-1.4/docs/releases/1.4.6.txt
new file mode 100644
index 0000000..575e9fa
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.4.6.txt
@@ -0,0 +1,31 @@
+==========================
+Django 1.4.6 release notes
+==========================
+
+*August 13, 2013*
+
+Django 1.4.6 fixes one security issue present in previous Django releases in
+the 1.4 series, as well as one other bug.
+
+This is the sixth bugfix/security release in the Django 1.4 series.
+
+Mitigated possible XSS attack via user-supplied redirect URLs
+-------------------------------------------------------------
+
+Django relies on user input in some cases (e.g.
+:func:`django.contrib.auth.views.login`, :mod:`django.contrib.comments`, and
+:doc:`i18n </topics/i18n/index>`) to redirect the user to an "on success" URL.
+The security checks for these redirects (namely
+``django.util.http.is_safe_url()``) didn't check if the scheme is ``http(s)``
+and as such allowed ``javascript:...`` URLs to be entered. If a developer
+relied on ``is_safe_url()`` to provide safe redirect targets and put such a
+URL into a link, he could suffer from an XSS attack. This bug doesn't affect
+Django currently, since we only put this URL into the ``Location`` response
+header and browsers seem to ignore JavaScript there.
+
+Bugfixes
+========
+
+* Fixed an obscure bug with the :func:`~django.test.utils.override_settings`
+  decorator. If you hit an ``AttributeError: 'Settings' object has no attribute
+  '_original_allowed_hosts'`` exception, it's probably fixed (#20636).
diff --git a/lib/django-1.4/docs/releases/1.4.7.txt b/lib/django-1.4/docs/releases/1.4.7.txt
new file mode 100644
index 0000000..64d3088
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.4.7.txt
@@ -0,0 +1,25 @@
+==========================
+Django 1.4.7 release notes
+==========================
+
+*September 10, 2013*
+
+Django 1.4.7 fixes one security issue present in previous Django releases in
+the 1.4 series.
+
+Directory traversal vulnerability in :ttag:`ssi` template tag
+-------------------------------------------------------------
+
+In previous versions of Django it was possible to bypass the
+:setting:`ALLOWED_INCLUDE_ROOTS` setting used for security with the :ttag:`ssi`
+template tag by specifying a relative path that starts with one of the allowed
+roots. For example, if ``ALLOWED_INCLUDE_ROOTS = ("/var/www",)`` the following
+would be possible:
+
+.. code-block:: html+django
+
+    {% ssi "/var/www/../../etc/passwd" %}
+
+In practice this is not a very common problem, as it would require the template
+author to put the :ttag:`ssi` file in a user-controlled variable, but it's
+possible in principle.
diff --git a/lib/django-1.4/docs/releases/1.4.8.txt b/lib/django-1.4/docs/releases/1.4.8.txt
new file mode 100644
index 0000000..08dca40
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.4.8.txt
@@ -0,0 +1,32 @@
+==========================
+Django 1.4.8 release notes
+==========================
+
+*September 14, 2013*
+
+Django 1.4.8 fixes two security issues present in previous Django releases in
+the 1.4 series.
+
+Denial-of-service via password hashers
+--------------------------------------
+
+In previous versions of Django, no limit was imposed on the plaintext
+length of a password. This allowed a denial-of-service attack through
+submission of bogus but extremely large passwords, tying up server
+resources performing the (expensive, and increasingly expensive with
+the length of the password) calculation of the corresponding hash.
+
+As of 1.4.8, Django's authentication framework imposes a 4096-byte
+limit on passwords and will fail authentication with any submitted
+password of greater length.
+
+Corrected usage of :func:`~django.views.decorators.debug.sensitive_post_parameters` in :mod:`django.contrib.auth`’s admin
+-------------------------------------------------------------------------------------------------------------------------
+
+The decoration of the ``add_view`` and ``user_change_password`` user admin
+views with :func:`~django.views.decorators.debug.sensitive_post_parameters`
+did not include :func:`~django.utils.decorators.method_decorator` (required
+since the views are methods) resulting in the decorator not being properly
+applied. This usage has been fixed and
+:func:`~django.views.decorators.debug.sensitive_post_parameters` will now
+throw an exception if it's improperly used.
diff --git a/lib/django-1.4/docs/releases/1.4.9.txt b/lib/django-1.4/docs/releases/1.4.9.txt
new file mode 100644
index 0000000..d7d79e7
--- /dev/null
+++ b/lib/django-1.4/docs/releases/1.4.9.txt
@@ -0,0 +1,21 @@
+==========================
+Django 1.4.9 release notes
+==========================
+
+*October 24, 2013*
+
+Django 1.4.9 fixes a security-related bug in the 1.4 series and one other
+data corruption bug.
+
+Readdressed denial-of-service via password hashers
+--------------------------------------------------
+
+Django 1.4.8 imposes a 4096-byte limit on passwords in order to mitigate a
+denial-of-service attack through submission of bogus but extremely large
+passwords. In Django 1.4.9, we've reverted this change and instead improved
+the speed of our PBKDF2 algorithm by not rehashing the key on every iteration.
+
+Bugfixes
+========
+
+* Fixed a data corruption bug with ``datetime_safe.datetime.combine`` (#21256).
diff --git a/lib/django-1.4/docs/releases/1.4.txt b/lib/django-1.4/docs/releases/1.4.txt
index a091869..2374eb6 100644
--- a/lib/django-1.4/docs/releases/1.4.txt
+++ b/lib/django-1.4/docs/releases/1.4.txt
@@ -507,10 +507,11 @@
 Error report filtering
 ~~~~~~~~~~~~~~~~~~~~~~
 
-We added two function decorators, :func:`sensitive_variables` and
-:func:`sensitive_post_parameters`, to allow designating the local variables
-and POST parameters that may contain sensitive information and should be
-filtered out of error reports.
+We added two function decorators,
+:func:`~django.views.decorators.debug.sensitive_variables` and
+:func:`~django.views.decorators.debug.sensitive_post_parameters`, to allow
+designating the local variables and POST parameters that may contain sensitive
+information and should be filtered out of error reports.
 
 All POST parameters are now systematically filtered out of error reports for
 certain views (``login``, ``password_reset_confirm``, ``password_change`` and
diff --git a/lib/django-1.4/docs/releases/index.txt b/lib/django-1.4/docs/releases/index.txt
index 0b465a6..d699069 100644
--- a/lib/django-1.4/docs/releases/index.txt
+++ b/lib/django-1.4/docs/releases/index.txt
@@ -14,12 +14,22 @@
 Final releases
 ==============
 
-
 1.4 release
 -----------
 .. toctree::
    :maxdepth: 1
 
+   1.4.13
+   1.4.12
+   1.4.11
+   1.4.10
+   1.4.9
+   1.4.8
+   1.4.7
+   1.4.6
+   1.4.5
+   1.4.4
+   1.4.3
    1.4.2
    1.4.1
    1.4
@@ -29,6 +39,11 @@
 .. toctree::
    :maxdepth: 1
 
+   1.3.7
+   1.3.6
+   1.3.5
+   1.3.4
+   1.3.3
    1.3.2
    1.3.1
    1.3
diff --git a/lib/django-1.4/docs/topics/auth.txt b/lib/django-1.4/docs/topics/auth.txt
index efc6e78..23a4a0c 100644
--- a/lib/django-1.4/docs/topics/auth.txt
+++ b/lib/django-1.4/docs/topics/auth.txt
@@ -462,6 +462,17 @@
 That's it -- now your Django install will use Bcrypt as the default storage
 algorithm.
 
+.. admonition:: Password truncation with BCryptPasswordHasher
+
+    The designers of bcrypt truncate all passwords at 72 characters which means
+    that ``bcrypt(password_with_100_chars) == bcrypt(password_with_100_chars[:72])``.
+    ``BCryptPasswordHasher`` does not have any special handling and
+    thus is also subject to this hidden password length limit. The practical
+    ramification of this truncation is pretty marginal as the average user does
+    not have a password greater than 72 characters in length and even being
+    truncated at 72 the compute power required to brute force bcrypt in any
+    useful amount of time is still astronomical.
+
 .. admonition:: Other bcrypt implementations
 
    There are several other implementations that allow bcrypt to be
@@ -1643,10 +1654,11 @@
 can also create permissions directly. For example, you can create the
 ``can_publish`` permission for a ``BlogPost`` model in ``myapp``::
 
+    from myapp.models import BlogPost
     from django.contrib.auth.models import Group, Permission
     from django.contrib.contenttypes.models import ContentType
 
-    content_type = ContentType.objects.get(app_label='myapp', model='BlogPost')
+    content_type = ContentType.objects.get_for_model(BlogPost)
     permission = Permission.objects.create(codename='can_publish',
                                            name='Can Publish Posts',
                                            content_type=content_type)
diff --git a/lib/django-1.4/docs/topics/cache.txt b/lib/django-1.4/docs/topics/cache.txt
index 99d764b..fa0a18c 100644
--- a/lib/django-1.4/docs/topics/cache.txt
+++ b/lib/django-1.4/docs/topics/cache.txt
@@ -1164,7 +1164,10 @@
   and ``Last-Modified`` headers.
 
 * :class:`django.middleware.gzip.GZipMiddleware` compresses responses for all
-  modern browsers, saving bandwidth and transfer time.
+  modern browsers, saving bandwidth and transfer time. Be warned, however,
+  that compression techniques like ``GZipMiddleware`` are subject to attacks.
+  See the warning in :class:`~django.middleware.gzip.GZipMiddleware` for
+  details.
 
 Order of MIDDLEWARE_CLASSES
 ===========================
diff --git a/lib/django-1.4/docs/topics/db/examples/many_to_many.txt b/lib/django-1.4/docs/topics/db/examples/many_to_many.txt
index 1ad89e7..35cbe65 100644
--- a/lib/django-1.4/docs/topics/db/examples/many_to_many.txt
+++ b/lib/django-1.4/docs/topics/db/examples/many_to_many.txt
@@ -35,7 +35,7 @@
 What follows are examples of operations that can be performed using the Python
 API facilities.
 
-Create a couple of Publications::
+Create a couple of ``Publications``::
 
     >>> p1 = Publication(title='The Python Journal')
     >>> p1.save()
@@ -44,11 +44,11 @@
     >>> p3 = Publication(title='Science Weekly')
     >>> p3.save()
 
-Create an Article::
+Create an ``Article``::
 
     >>> a1 = Article(headline='Django lets you build Web apps easily')
 
-You can't associate it with a Publication until it's been saved::
+You can't associate it with a ``Publication`` until it's been saved::
 
     >>> a1.publications.add(p1)
     Traceback (most recent call last):
@@ -60,11 +60,11 @@
 
     >>> a1.save()
 
-Associate the Article with a Publication::
+Associate the ``Article`` with a ``Publication``::
 
     >>> a1.publications.add(p1)
 
-Create another Article, and set it to appear in both Publications::
+Create another ``Article``, and set it to appear in both ``Publications``::
 
     >>> a2 = Article(headline='NASA uses Python')
     >>> a2.save()
@@ -75,25 +75,26 @@
 
     >>> a2.publications.add(p3)
 
-Adding an object of the wrong type raises TypeError::
+Adding an object of the wrong type raises :exc:`~exceptions.TypeError`::
 
     >>> a2.publications.add(a1)
     Traceback (most recent call last):
     ...
     TypeError: 'Publication' instance expected
 
-Add a Publication directly via publications.add by using keyword arguments::
+Create and add a ``Publication`` to an ``Article`` in one step using
+:meth:`~django.db.models.fields.related.RelatedManager.create`::
 
     >>> new_publication = a2.publications.create(title='Highlights for Children')
 
-Article objects have access to their related Publication objects::
+``Article`` objects have access to their related ``Publication`` objects::
 
     >>> a1.publications.all()
     [<Publication: The Python Journal>]
     >>> a2.publications.all()
     [<Publication: Highlights for Children>, <Publication: Science News>, <Publication: Science Weekly>, <Publication: The Python Journal>]
 
-Publication objects have access to their related Article objects::
+``Publication`` objects have access to their related ``Article`` objects::
 
     >>> p2.article_set.all()
     [<Article: NASA uses Python>]
@@ -102,7 +103,8 @@
     >>> Publication.objects.get(id=4).article_set.all()
     [<Article: NASA uses Python>]
 
-Many-to-many relationships can be queried using :ref:`lookups across relationships <lookups-that-span-relationships>`::
+Many-to-many relationships can be queried using :ref:`lookups across
+relationships <lookups-that-span-relationships>`::
 
     >>> Article.objects.filter(publications__id__exact=1)
     [<Article: Django lets you build Web apps easily>, <Article: NASA uses Python>]
@@ -119,7 +121,8 @@
     >>> Article.objects.filter(publications__title__startswith="Science").distinct()
     [<Article: NASA uses Python>]
 
-The count() function respects distinct() as well::
+The :meth:`~django.db.models.query.QuerySet.count` function respects
+:meth:`~django.db.models.query.QuerySet.distinct` as well::
 
     >>> Article.objects.filter(publications__title__startswith="Science").count()
     2
@@ -133,7 +136,7 @@
     [<Article: Django lets you build Web apps easily>, <Article: NASA uses Python>]
 
 Reverse m2m queries are supported (i.e., starting at the table that doesn't have
-a ManyToManyField)::
+a :class:`~django.db.models.ManyToManyField`)::
 
     >>> Publication.objects.filter(id__exact=1)
     [<Publication: The Python Journal>]
@@ -163,7 +166,7 @@
     >>> Article.objects.exclude(publications=p2)
     [<Article: Django lets you build Web apps easily>]
 
-If we delete a Publication, its Articles won't be able to access it::
+If we delete a ``Publication``, its ``Articles`` won't be able to access it::
 
     >>> p1.delete()
     >>> Publication.objects.all()
@@ -172,7 +175,7 @@
     >>> a1.publications.all()
     []
 
-If we delete an Article, its Publications won't be able to access it::
+If we delete an ``Article``, its ``Publications`` won't be able to access it::
 
     >>> a2.delete()
     >>> Article.objects.all()
@@ -199,7 +202,7 @@
     >>> a5.publications.all()
     [<Publication: Science News>]
 
-Removing publication from an article::
+Removing ``Publication`` from an ``Article``::
 
     >>> a4.publications.remove(p2)
     >>> p2.article_set.all()
@@ -242,7 +245,7 @@
     >>> p2.article_set.all()
     [<Article: Oxygen-free diet works wonders>]
 
-Recreate the article and Publication we have deleted::
+Recreate the ``Article`` and ``Publication`` we have deleted::
 
     >>> p1 = Publication(title='The Python Journal')
     >>> p1.save()
@@ -250,7 +253,8 @@
     >>> a2.save()
     >>> a2.publications.add(p1, p2, p3)
 
-Bulk delete some Publications - references to deleted publications should go::
+Bulk delete some ``Publications`` - references to deleted publications should
+go::
 
     >>> Publication.objects.filter(title__startswith='Science').delete()
     >>> Publication.objects.all()
@@ -267,15 +271,18 @@
     [<Article: Django lets you build Web apps easily>]
     >>> q.delete()
 
-After the delete, the QuerySet cache needs to be cleared, and the referenced
-objects should be gone::
+After the :meth:`~django.db.models.query.QuerySet.delete`, the
+:class:`~django.db.models.query.QuerySet` cache needs to be cleared, and the
+referenced objects should be gone::
 
     >>> print q
     []
     >>> p1.article_set.all()
     [<Article: NASA uses Python>]
 
-An alternate to calling clear() is to assign the empty set::
+An alternate to calling
+:meth:`~django.db.models.fields.related.RelatedManager.clear` is to assign the
+empty set::
 
     >>> p1.article_set = []
     >>> p1.article_set.all()
diff --git a/lib/django-1.4/docs/topics/db/managers.txt b/lib/django-1.4/docs/topics/db/managers.txt
index eda6f9d..086f9dd 100644
--- a/lib/django-1.4/docs/topics/db/managers.txt
+++ b/lib/django-1.4/docs/topics/db/managers.txt
@@ -85,7 +85,7 @@
         objects = PollManager()
 
     class Response(models.Model):
-        poll = models.ForeignKey(Poll)
+        poll = models.ForeignKey(OpinionPoll)
         person_name = models.CharField(max_length=50)
         response = models.TextField()
 
diff --git a/lib/django-1.4/docs/topics/db/models.txt b/lib/django-1.4/docs/topics/db/models.txt
index a0b7416..a9968ac 100644
--- a/lib/django-1.4/docs/topics/db/models.txt
+++ b/lib/django-1.4/docs/topics/db/models.txt
@@ -66,13 +66,13 @@
 your ``models.py``.
 
 For example, if the models for your application live in the module
-``mysite.myapp.models`` (the package structure that is created for an
+``myapp.models`` (the package structure that is created for an
 application by the :djadmin:`manage.py startapp <startapp>` script),
 :setting:`INSTALLED_APPS` should read, in part::
 
     INSTALLED_APPS = (
         #...
-        'mysite.myapp',
+        'myapp',
         #...
     )
 
diff --git a/lib/django-1.4/docs/topics/db/queries.txt b/lib/django-1.4/docs/topics/db/queries.txt
index 2e14abe..ed38d1d 100644
--- a/lib/django-1.4/docs/topics/db/queries.txt
+++ b/lib/django-1.4/docs/topics/db/queries.txt
@@ -815,7 +815,7 @@
     The `OR lookups examples`_ in the Django unit tests show some possible uses
     of ``Q``.
 
-    .. _OR lookups examples: https://code.djangoproject.com/browser/django/trunk/tests/modeltests/or_lookups/tests.py
+    .. _OR lookups examples: https://github.com/django/django/blob/stable/1.4.x/tests/modeltests/or_lookups/tests.py
 
 Comparing objects
 =================
diff --git a/lib/django-1.4/docs/topics/db/sql.txt b/lib/django-1.4/docs/topics/db/sql.txt
index 80038e5..d387ad5 100644
--- a/lib/django-1.4/docs/topics/db/sql.txt
+++ b/lib/django-1.4/docs/topics/db/sql.txt
@@ -69,6 +69,16 @@
     database, but does nothing to enforce that. If the query does not
     return rows, a (possibly cryptic) error will result.
 
+.. warning::
+
+    If you are performing queries on MySQL, note that MySQL's silent type coercion
+    may cause unexpected results when mixing types. If you query on a string
+    type column, but with an integer value, MySQL will coerce the types of all values
+    in the table to an integer before performing the comparison. For example, if your
+    table contains the values ``'abc'``, ``'def'`` and you query for ``WHERE mycolumn=0``,
+    both rows will match. To prevent this, perform the correct typecasting
+    before using the value in a query.
+
 Mapping query fields to model fields
 ------------------------------------
 
diff --git a/lib/django-1.4/docs/topics/forms/formsets.txt b/lib/django-1.4/docs/topics/forms/formsets.txt
index b524c24..03fa317 100644
--- a/lib/django-1.4/docs/topics/forms/formsets.txt
+++ b/lib/django-1.4/docs/topics/forms/formsets.txt
@@ -108,8 +108,10 @@
 objects, up to ``extra`` additional blank forms will be added to the formset,
 so long as the total number of forms does not exceed ``max_num``.
 
-A ``max_num`` value of ``None`` (the default) puts no limit on the number of
-forms displayed. Please note that the default value of ``max_num`` was changed
+A ``max_num`` value of ``None`` (the default) puts a high limit on the number
+of forms displayed (1000). In practice this is equivalent to no limit.
+
+Please note that the default value of ``max_num`` was changed
 from ``0`` to ``None`` in version 1.2 to allow ``0`` as a valid value.
 
 Formset validation
diff --git a/lib/django-1.4/docs/topics/forms/modelforms.txt b/lib/django-1.4/docs/topics/forms/modelforms.txt
index 091073f..987d087 100644
--- a/lib/django-1.4/docs/topics/forms/modelforms.txt
+++ b/lib/django-1.4/docs/topics/forms/modelforms.txt
@@ -71,7 +71,7 @@
 
 ``FileField``                    ``FileField``
 
-``FilePathField``                ``CharField``
+``FilePathField``                ``FilePathField``
 
 ``FloatField``                   ``FloatField``
 
@@ -225,11 +225,6 @@
     # Save a new Article object from the form's data.
     >>> new_article = f.save()
 
-    # Create a form to edit an existing Article.
-    >>> a = Article.objects.get(pk=1)
-    >>> f = ArticleForm(instance=a)
-    >>> f.save()
-
     # Create a form to edit an existing Article, but use
     # POST data to populate the form.
     >>> a = Article.objects.get(pk=1)
@@ -673,6 +668,12 @@
 need to call ``formset.save_m2m()`` to ensure the many-to-many relationships
 are saved properly.
 
+.. note::
+
+    While calling ``formset.save(commit=False)`` does not save new or changed
+    objects to the database, it *does* delete objects that have been marked for
+    deletion. This behavior will be corrected in Django 1.7.
+
 .. _model-formsets-max-num:
 
 Limiting the number of editable objects
@@ -708,8 +709,8 @@
 
 .. versionchanged:: 1.2
 
-A ``max_num`` value of ``None`` (the default) puts no limit on the number of
-forms displayed.
+A ``max_num`` value of ``None`` (the default) puts a high limit on the number
+of forms displayed (1000). In practice this is equivalent to no limit.
 
 Using a model formset in a view
 -------------------------------
diff --git a/lib/django-1.4/docs/topics/security.txt b/lib/django-1.4/docs/topics/security.txt
index 0b51128..2a784bc 100644
--- a/lib/django-1.4/docs/topics/security.txt
+++ b/lib/django-1.4/docs/topics/security.txt
@@ -149,48 +149,40 @@
 
 .. _additional-security-topics:
 
-Host headers and virtual hosting
-================================
+Host header validation
+======================
 
-Django uses the ``Host`` header provided by the client to construct URLs
-in certain cases. While these values are sanitized to prevent Cross
-Site Scripting attacks, they can be used for Cross-Site Request
-Forgery and cache poisoning attacks in some circumstances. We
-recommend you ensure your Web server is configured such that:
+Django uses the ``Host`` header provided by the client to construct URLs in
+certain cases. While these values are sanitized to prevent Cross Site Scripting
+attacks, a fake ``Host`` value can be used for Cross-Site Request Forgery,
+cache poisoning attacks, and poisoning links in emails.
 
-    * It always validates incoming HTTP ``Host`` headers against the expected
-      host name.
-    * Disallows requests with no ``Host`` header.
-    * Is *not* configured with a catch-all virtual host that forwards requests
-      to a Django application.
+Because even seemingly-secure webserver configurations are susceptible to fake
+``Host`` headers, Django validates ``Host`` headers against the
+:setting:`ALLOWED_HOSTS` setting in the
+:meth:`django.http.HttpRequest.get_host()` method.
+
+This validation only applies via :meth:`~django.http.HttpRequest.get_host()`;
+if your code accesses the ``Host`` header directly from ``request.META`` you
+are bypassing this security protection.
+
+For more details see the full :setting:`ALLOWED_HOSTS` documentation.
+
+.. warning::
+
+   Previous versions of this document recommended configuring your webserver to
+   ensure it validates incoming HTTP ``Host`` headers. While this is still
+   recommended, in many common webservers a configuration that seems to
+   validate the ``Host`` header may not in fact do so. For instance, even if
+   Apache is configured such that your Django site is served from a non-default
+   virtual host with the ``ServerName`` set, it is still possible for an HTTP
+   request to match this virtual host and supply a fake ``Host`` header. Thus,
+   Django now requires that you set :setting:`ALLOWED_HOSTS` explicitly rather
+   than relying on webserver configuration.
 
 Additionally, as of 1.3.1, Django requires you to explicitly enable support for
-the ``X-Forwarded-Host`` header if your configuration requires it.
-
-Configuration for Apache
-------------------------
-
-The easiest way to get the described behavior in Apache is as follows. Create
-a `virtual host`_ using the ServerName_ and ServerAlias_ directives to restrict
-the domains Apache reacts to. Please keep in mind that while the directives do
-support ports the match is only performed against the hostname. This means that
-the ``Host`` header could still contain a port pointing to another webserver on
-the same machine. The next step is to make sure that your newly created virtual
-host is not also the default virtual host. Apache uses the first virtual host
-found in the configuration file as default virtual host.  As such you have to
-ensure that you have another virtual host which will act as catch-all virtual
-host. Just add one if you do not have one already, there is nothing special
-about it aside from ensuring it is the first virtual host in the configuration
-file. Debian/Ubuntu users usually don't have to take any action, since Apache
-ships with a default virtual host in ``sites-available`` which is linked into
-``sites-enabled`` as ``000-default`` and included from ``apache2.conf``. Just
-make sure not to name your site ``000-abc``, since files are included in
-alphabetical order.
-
-.. _virtual host: http://httpd.apache.org/docs/2.2/vhosts/
-.. _ServerName: http://httpd.apache.org/docs/2.2/mod/core.html#servername
-.. _ServerAlias: http://httpd.apache.org/docs/2.2/mod/core.html#serveralias
-
+the ``X-Forwarded-Host`` header (via the :setting:`USE_X_FORWARDED_HOST`
+setting) if your configuration requires it.
 
 
 
diff --git a/lib/django-1.4/extras/Makefile b/lib/django-1.4/extras/Makefile
new file mode 100644
index 0000000..ff14f40
--- /dev/null
+++ b/lib/django-1.4/extras/Makefile
@@ -0,0 +1,9 @@
+all: sdist bdist_wheel
+
+sdist:
+	python setup.py sdist
+
+bdist_wheel:
+	python -c "import setuptools;__file__='setup.py';exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" bdist_wheel
+
+.PHONY : sdist bdist_wheel
diff --git a/lib/django-1.4/setup.py b/lib/django-1.4/setup.py
index e9ffb0c..4bc36b0 100644
--- a/lib/django-1.4/setup.py
+++ b/lib/django-1.4/setup.py
@@ -75,7 +75,7 @@
     author = 'Django Software Foundation',
     author_email = 'foundation@djangoproject.com',
     description = 'A high-level Python Web framework that encourages rapid development and clean, pragmatic design.',
-    download_url = 'https://www.djangoproject.com/m/releases/1.4/Django-1.4.3.tar.gz',
+    download_url = 'https://www.djangoproject.com/m/releases/1.4/Django-1.4.13.tar.gz',
     packages = packages,
     cmdclass = cmdclasses,
     data_files = data_files,
diff --git a/lib/django-1.4/tests/modeltests/prefetch_related/tests.py b/lib/django-1.4/tests/modeltests/prefetch_related/tests.py
index f48630a..7f494fe 100644
--- a/lib/django-1.4/tests/modeltests/prefetch_related/tests.py
+++ b/lib/django-1.4/tests/modeltests/prefetch_related/tests.py
@@ -364,7 +364,9 @@
             l = [a.authorwithage for a in Author.objects.prefetch_related('authorwithage')]
 
         # Regression for #18090: the prefetching query must include an IN clause.
-        self.assertIn('authorwithage', connection.queries[-1]['sql'])
+        # Note that on Oracle the table name is upper case in the generated SQL,
+        # thus the .lower() call.
+        self.assertIn('authorwithage', connection.queries[-1]['sql'].lower())
         self.assertIn(' IN ', connection.queries[-1]['sql'])
 
         self.assertEqual(l, [a.authorwithage for a in Author.objects.all()])
diff --git a/lib/django-1.4/tests/modeltests/validation/tests.py b/lib/django-1.4/tests/modeltests/validation/tests.py
index 5c94f43..3078089 100644
--- a/lib/django-1.4/tests/modeltests/validation/tests.py
+++ b/lib/django-1.4/tests/modeltests/validation/tests.py
@@ -85,6 +85,7 @@
         mtv = ModelToValidate(number=10, name='Some Name', url_verify='http://qa-dev.w3.org/link-testsuite/http.php?code=301') #example.com is a redirect to iana.org now
         self.assertEqual(None, mtv.full_clean()) # This will fail if there's no Internet connection
 
+    @verify_exists_urls(existing_urls=())
     def test_correct_https_url_but_nonexisting(self):
         mtv = ModelToValidate(number=10, name='Some Name', url_verify='https://www.example.com/')
         self.assertFieldFailsValidationWithMessage(mtv.full_clean, 'url_verify', [u'This URL appears to be a broken link.'])
diff --git a/lib/django-1.4/tests/regressiontests/admin_filters/tests.py b/lib/django-1.4/tests/regressiontests/admin_filters/tests.py
index c75ba6c..7eb0c6a 100644
--- a/lib/django-1.4/tests/regressiontests/admin_filters/tests.py
+++ b/lib/django-1.4/tests/regressiontests/admin_filters/tests.py
@@ -82,11 +82,11 @@
     parameter_name = 'department'
 
     def lookups(self, request, model_admin):
-        return set([
+        return sorted(set([
             (employee.department.id,  # Intentionally not a string (Refs #19318)
              employee.department.code)
             for employee in model_admin.queryset(request).all()
-        ])
+        ]))
 
     def queryset(self, request, queryset):
         if self.value():
@@ -681,10 +681,9 @@
         filterspec = changelist.get_filters(request)[0][-1]
         self.assertEqual(force_unicode(filterspec.title), u'department')
         choices = list(filterspec.choices(changelist))
-
-        self.assertEqual(choices[2]['display'], u'DEV')
-        self.assertEqual(choices[2]['selected'], True)
-        self.assertEqual(choices[2]['query_string'], '?department=%s' % self.john.pk)
+        self.assertEqual(choices[1]['display'], 'DEV')
+        self.assertEqual(choices[1]['selected'], True)
+        self.assertEqual(choices[1]['query_string'], '?department=%s' % self.john.pk)
 
     def test_fk_with_to_field(self):
         """
diff --git a/lib/django-1.4/tests/regressiontests/admin_inlines/tests.py b/lib/django-1.4/tests/regressiontests/admin_inlines/tests.py
index 8b620cc..27c6431 100644
--- a/lib/django-1.4/tests/regressiontests/admin_inlines/tests.py
+++ b/lib/django-1.4/tests/regressiontests/admin_inlines/tests.py
@@ -445,15 +445,7 @@
         self.selenium.find_element_by_name('profile_set-2-last_name').send_keys('2 last name 2')
 
         self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
-
-        try:
-            # Wait for the next page to be loaded.
-            self.wait_loaded_tag('body')
-        except TimeoutException:
-            # IE7 occasionnally returns an error "Internet Explorer cannot
-            # display the webpage" and doesn't load the next page. We just
-            # ignore it.
-            pass
+        self.wait_page_loaded()
 
         # Check that the objects have been created in the database
         self.assertEqual(ProfileCollection.objects.all().count(), 1)
@@ -502,4 +494,4 @@
     webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
 
 class SeleniumIETests(SeleniumFirefoxTests):
-    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
\ No newline at end of file
+    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
diff --git a/lib/django-1.4/tests/regressiontests/admin_views/tests.py b/lib/django-1.4/tests/regressiontests/admin_views/tests.py
index 8835e81..b695453 100644
--- a/lib/django-1.4/tests/regressiontests/admin_views/tests.py
+++ b/lib/django-1.4/tests/regressiontests/admin_views/tests.py
@@ -1064,6 +1064,46 @@
             self.assertContains(request, 'login-form')
             self.client.get('/test_admin/admin/logout/')
 
+    def testHistoryView(self):
+        """History view should restrict access."""
+
+        # add user should not be able to view the list of articles or change any of them
+        self.client.get('/test_admin/admin/')
+        self.client.post('/test_admin/admin/', self.adduser_login)
+        response = self.client.get('/test_admin/admin/admin_views/article/1/history/')
+        self.assertEqual(response.status_code, 403)
+        self.client.get('/test_admin/admin/logout/')
+
+        # change user can view all items and edit them
+        self.client.get('/test_admin/admin/')
+        self.client.post('/test_admin/admin/', self.changeuser_login)
+        response = self.client.get('/test_admin/admin/admin_views/article/1/history/')
+        self.assertEqual(response.status_code, 200)
+
+        # Test redirection when using row-level change permissions. Refs #11513.
+        RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
+        RowLevelChangePermissionModel.objects.create(id=2, name="even id")
+        for login_dict in [self.super_login, self.changeuser_login, self.adduser_login, self.deleteuser_login]:
+            self.client.post('/test_admin/admin/', login_dict)
+            response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/history/')
+            self.assertEqual(response.status_code, 403)
+
+            response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/history/')
+            self.assertEqual(response.status_code, 200)
+
+            self.client.get('/test_admin/admin/logout/')
+
+        for login_dict in [self.joepublic_login, self.no_username_login]:
+            self.client.post('/test_admin/admin/', login_dict)
+            response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/1/history/')
+            self.assertEqual(response.status_code, 200)
+            self.assertContains(response, 'login-form')
+            response = self.client.get('/test_admin/admin/admin_views/rowlevelchangepermissionmodel/2/history/')
+            self.assertEqual(response.status_code, 200)
+            self.assertContains(response, 'login-form')
+
+            self.client.get('/test_admin/admin/logout/')
+
     def testConditionallyShowAddSectionLink(self):
         """
         The foreign key widget should only show the "add related" button if the
@@ -2994,16 +3034,7 @@
 
         # Save and check that everything is properly stored in the database
         self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
-
-        try:
-            # Wait for the next page to be loaded.
-            self.wait_loaded_tag('body')
-        except TimeoutException:
-            # IE7 occasionnally returns an error "Internet Explorer cannot
-            # display the webpage" and doesn't load the next page. We just
-            # ignore it.
-            pass
-
+        self.wait_page_loaded()
         self.assertEqual(MainPrepopulated.objects.all().count(), 1)
         MainPrepopulated.objects.get(
             name=u' this is the mAin nÀMë and it\'s awεšome',
diff --git a/lib/django-1.4/tests/regressiontests/admin_widgets/tests.py b/lib/django-1.4/tests/regressiontests/admin_widgets/tests.py
index 87e0309..974f4d1 100644
--- a/lib/django-1.4/tests/regressiontests/admin_widgets/tests.py
+++ b/lib/django-1.4/tests/regressiontests/admin_widgets/tests.py
@@ -597,12 +597,14 @@
         self.selenium.get(
             '%s%s' % (self.live_server_url, '/admin_widgets/school/%s/' % self.school.id))
 
+        self.wait_page_loaded()
         self.execute_basic_operations('vertical', 'students')
         self.execute_basic_operations('horizontal', 'alumni')
 
         # Save and check that everything is properly stored in the database ---
         self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
-        self.school = models.School.objects.get(id=self.school.id) # Reload from database
+        self.wait_page_loaded()
+        self.school = models.School.objects.get(id=self.school.id)  # Reload from database
         self.assertEqual(list(self.school.students.all()),
                          [self.arthur, self.cliff, self.jason, self.john])
         self.assertEqual(list(self.school.alumni.all()),
@@ -681,6 +683,7 @@
 
         # Save and check that everything is properly stored in the database ---
         self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
+        self.wait_page_loaded()
         self.school = models.School.objects.get(id=self.school.id) # Reload from database
         self.assertEqual(list(self.school.students.all()),
                          [self.jason, self.peter])
@@ -691,4 +694,4 @@
     webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
 
 class HorizontalVerticalFilterSeleniumIETests(HorizontalVerticalFilterSeleniumFirefoxTests):
-    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
\ No newline at end of file
+    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
diff --git a/lib/django-1.4/tests/regressiontests/aggregation_regress/tests.py b/lib/django-1.4/tests/regressiontests/aggregation_regress/tests.py
index 36a54c0..11ad6ac 100644
--- a/lib/django-1.4/tests/regressiontests/aggregation_regress/tests.py
+++ b/lib/django-1.4/tests/regressiontests/aggregation_regress/tests.py
@@ -587,10 +587,9 @@
         )
 
         publishers = publishers.annotate(n_books=Count("book"))
-        self.assertEqual(
-            publishers[0].n_books,
-            2
-        )
+        sorted_publishers = sorted(publishers, key=lambda x: x.name)
+        self.assertEqual(sorted_publishers[0].n_books, 2)
+        self.assertEqual(sorted_publishers[1].n_books, 1)
 
         self.assertEqual(
             sorted(p.name for p in publishers),
diff --git a/lib/django-1.4/tests/regressiontests/cache/tests.py b/lib/django-1.4/tests/regressiontests/cache/tests.py
index bd29cde..b0be259 100644
--- a/lib/django-1.4/tests/regressiontests/cache/tests.py
+++ b/lib/django-1.4/tests/regressiontests/cache/tests.py
@@ -17,10 +17,12 @@
 from django.core.cache import get_cache, DEFAULT_CACHE_ALIAS
 from django.core.cache.backends.base import (CacheKeyWarning,
     InvalidCacheBackendError)
+from django.core.context_processors import csrf
 from django.db import router
 from django.http import HttpResponse, HttpRequest, QueryDict
 from django.middleware.cache import (FetchFromCacheMiddleware,
     UpdateCacheMiddleware, CacheMiddleware)
+from django.middleware.csrf import CsrfViewMiddleware
 from django.template import Template
 from django.template.response import TemplateResponse
 from django.test import TestCase, TransactionTestCase, RequestFactory
@@ -1418,6 +1420,10 @@
     return HttpResponse('Hello World %s' % value)
 
 
+def csrf_view(request):
+    return HttpResponse(csrf(request)['csrf_token'])
+
+
 class CacheMiddlewareTest(TestCase):
 
     def setUp(self):
@@ -1635,6 +1641,27 @@
         response = other_with_timeout_view(request, '18')
         self.assertEqual(response.content, 'Hello World 18')
 
+    def test_sensitive_cookie_not_cached(self):
+        """
+        Django must prevent caching of responses that set a user-specific (and
+        maybe security sensitive) cookie in response to a cookie-less request.
+        """
+        csrf_middleware = CsrfViewMiddleware()
+        cache_middleware = CacheMiddleware()
+
+        request = self.factory.get('/view/')
+        self.assertIsNone(cache_middleware.process_request(request))
+
+        csrf_middleware.process_view(request, csrf_view, (), {})
+
+        response = csrf_view(request)
+
+        response = csrf_middleware.process_response(request, response)
+        response = cache_middleware.process_response(request, response)
+
+        # Inserting a CSRF cookie in a cookie-less request prevented caching.
+        self.assertIsNone(cache_middleware.process_request(request))
+
 CacheMiddlewareTest = override_settings(
         CACHE_MIDDLEWARE_ALIAS='other',
         CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
diff --git a/lib/django-1.4/tests/regressiontests/comment_tests/tests/moderation_view_tests.py b/lib/django-1.4/tests/regressiontests/comment_tests/tests/moderation_view_tests.py
index 54f3f3a..f6fb4e2 100644
--- a/lib/django-1.4/tests/regressiontests/comment_tests/tests/moderation_view_tests.py
+++ b/lib/django-1.4/tests/regressiontests/comment_tests/tests/moderation_view_tests.py
@@ -38,7 +38,7 @@
         self.client.login(username="normaluser", password="normaluser")
         response = self.client.post("/flag/%d/" % pk, {'next': "/go/here/"})
         self.assertEqual(response["Location"],
-            "http://testserver/go/here/?c=1")
+            "http://testserver/go/here/?c=%d" % pk)
 
     def testFlagPostUnsafeNext(self):
         """
@@ -135,7 +135,7 @@
         self.client.login(username="normaluser", password="normaluser")
         response = self.client.post("/delete/%d/" % pk, {'next': "/go/here/"})
         self.assertEqual(response["Location"],
-            "http://testserver/go/here/?c=1")
+            "http://testserver/go/here/?c=%d" % pk)
 
     def testDeletePostUnsafeNext(self):
         """
@@ -209,7 +209,7 @@
         response = self.client.post("/approve/%d/" % c1.pk,
             {'next': "/go/here/"})
         self.assertEqual(response["Location"],
-            "http://testserver/go/here/?c=1")
+            "http://testserver/go/here/?c=%d" % c1.pk)
 
     def testApprovePostUnsafeNext(self):
         """
diff --git a/lib/django-1.4/tests/regressiontests/csrf_tests/tests.py b/lib/django-1.4/tests/regressiontests/csrf_tests/tests.py
index 71400ea..a605134 100644
--- a/lib/django-1.4/tests/regressiontests/csrf_tests/tests.py
+++ b/lib/django-1.4/tests/regressiontests/csrf_tests/tests.py
@@ -7,6 +7,7 @@
 from django.middleware.csrf import CsrfViewMiddleware, CSRF_KEY_LENGTH
 from django.template import RequestContext, Template
 from django.test import TestCase
+from django.test.utils import override_settings
 from django.views.decorators.csrf import csrf_exempt, requires_csrf_token, ensure_csrf_cookie
 
 
@@ -267,6 +268,7 @@
         csrf_cookie = resp2.cookies[settings.CSRF_COOKIE_NAME]
         self._check_token_present(resp, csrf_id=csrf_cookie.value)
 
+    @override_settings(ALLOWED_HOSTS=['www.example.com'])
     def test_https_bad_referer(self):
         """
         Test that a POST HTTPS request with a bad referer is rejected
@@ -279,6 +281,7 @@
         self.assertNotEqual(None, req2)
         self.assertEqual(403, req2.status_code)
 
+    @override_settings(ALLOWED_HOSTS=['www.example.com'])
     def test_https_good_referer(self):
         """
         Test that a POST HTTPS request with a good referer is accepted
@@ -290,6 +293,7 @@
         req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
         self.assertEqual(None, req2)
 
+    @override_settings(ALLOWED_HOSTS=['www.example.com'])
     def test_https_good_referer_2(self):
         """
         Test that a POST HTTPS request with a good referer is accepted
diff --git a/lib/django-1.4/tests/regressiontests/forms/tests/formsets.py b/lib/django-1.4/tests/regressiontests/forms/tests/formsets.py
index 05ef978..7c69e7e 100644
--- a/lib/django-1.4/tests/regressiontests/forms/tests/formsets.py
+++ b/lib/django-1.4/tests/regressiontests/forms/tests/formsets.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-from django.forms import Form, CharField, IntegerField, ValidationError, DateField
+from django.forms import Form, CharField, IntegerField, ValidationError, DateField, formsets
 from django.forms.formsets import formset_factory, BaseFormSet
 from django.test import TestCase
 
@@ -47,7 +47,7 @@
         # for adding data. By default, it displays 1 blank form. It can display more,
         # but we'll look at how to do so later.
         formset = ChoiceFormSet(auto_id=False, prefix='choices')
-        self.assertHTMLEqual(str(formset), """<input type="hidden" name="choices-TOTAL_FORMS" value="1" /><input type="hidden" name="choices-INITIAL_FORMS" value="0" /><input type="hidden" name="choices-MAX_NUM_FORMS" />
+        self.assertHTMLEqual(str(formset), """<input type="hidden" name="choices-TOTAL_FORMS" value="1" /><input type="hidden" name="choices-INITIAL_FORMS" value="0" /><input type="hidden" name="choices-MAX_NUM_FORMS" value="1000" />
 <tr><th>Choice:</th><td><input type="text" name="choices-0-choice" /></td></tr>
 <tr><th>Votes:</th><td><input type="text" name="choices-0-votes" /></td></tr>""")
 
@@ -650,8 +650,8 @@
         # Limiting the maximum number of forms ########################################
         # Base case for max_num.
 
-        # When not passed, max_num will take its default value of None, i.e. unlimited
-        # number of forms, only controlled by the value of the extra parameter.
+        # When not passed, max_num will take a high default value, leaving the
+        # number of forms only controlled by the value of the extra parameter.
 
         LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=3)
         formset = LimitedFavoriteDrinkFormSet()
@@ -698,8 +698,8 @@
     def test_max_num_with_initial_data(self):
         # max_num with initial data
 
-        # When not passed, max_num will take its default value of None, i.e. unlimited
-        # number of forms, only controlled by the values of the initial and extra
+        # When not passed, max_num will take a high default value, leaving the
+        # number of forms only controlled by the values of the initial and extra
         # parameters.
 
         initial = [
@@ -844,6 +844,64 @@
         self.assertEqual(len(formset.forms), 0)
         self.assertTrue(formset)
 
+    def test_hard_limit_on_instantiated_forms(self):
+        """A formset has a hard limit on the number of forms instantiated."""
+        # reduce the default limit of 1000 temporarily for testing
+        _old_DEFAULT_MAX_NUM = formsets.DEFAULT_MAX_NUM
+        try:
+            formsets.DEFAULT_MAX_NUM = 3
+            ChoiceFormSet = formset_factory(Choice)
+            # someone fiddles with the mgmt form data...
+            formset = ChoiceFormSet(
+                {
+                    'choices-TOTAL_FORMS': '4',
+                    'choices-INITIAL_FORMS': '0',
+                    'choices-MAX_NUM_FORMS': '4',
+                    'choices-0-choice': 'Zero',
+                    'choices-0-votes': '0',
+                    'choices-1-choice': 'One',
+                    'choices-1-votes': '1',
+                    'choices-2-choice': 'Two',
+                    'choices-2-votes': '2',
+                    'choices-3-choice': 'Three',
+                    'choices-3-votes': '3',
+                    },
+                prefix='choices',
+                )
+            # But we still only instantiate 3 forms
+            self.assertEqual(len(formset.forms), 3)
+        finally:
+            formsets.DEFAULT_MAX_NUM = _old_DEFAULT_MAX_NUM
+
+    def test_increase_hard_limit(self):
+        """Can increase the built-in forms limit via a higher max_num."""
+        # reduce the default limit of 1000 temporarily for testing
+        _old_DEFAULT_MAX_NUM = formsets.DEFAULT_MAX_NUM
+        try:
+            formsets.DEFAULT_MAX_NUM = 3
+            # for this form, we want a limit of 4
+            ChoiceFormSet = formset_factory(Choice, max_num=4)
+            formset = ChoiceFormSet(
+                {
+                    'choices-TOTAL_FORMS': '4',
+                    'choices-INITIAL_FORMS': '0',
+                    'choices-MAX_NUM_FORMS': '4',
+                    'choices-0-choice': 'Zero',
+                    'choices-0-votes': '0',
+                    'choices-1-choice': 'One',
+                    'choices-1-votes': '1',
+                    'choices-2-choice': 'Two',
+                    'choices-2-votes': '2',
+                    'choices-3-choice': 'Three',
+                    'choices-3-votes': '3',
+                    },
+                prefix='choices',
+                )
+            # This time four forms are instantiated
+            self.assertEqual(len(formset.forms), 4)
+        finally:
+            formsets.DEFAULT_MAX_NUM = _old_DEFAULT_MAX_NUM
+
 
 data = {
     'choices-TOTAL_FORMS': '1', # the number of forms rendered
diff --git a/lib/django-1.4/tests/regressiontests/generic_inline_admin/tests.py b/lib/django-1.4/tests/regressiontests/generic_inline_admin/tests.py
index db81eec..237e396 100644
--- a/lib/django-1.4/tests/regressiontests/generic_inline_admin/tests.py
+++ b/lib/django-1.4/tests/regressiontests/generic_inline_admin/tests.py
@@ -7,6 +7,7 @@
 from django.contrib.admin.sites import AdminSite
 from django.contrib.contenttypes.generic import (
     generic_inlineformset_factory, GenericTabularInline)
+from django.forms.formsets import DEFAULT_MAX_NUM
 from django.forms.models import ModelForm
 from django.test import TestCase
 
@@ -241,7 +242,7 @@
 
         # Create a formset with default arguments
         formset = media_inline.get_formset(request)
-        self.assertEqual(formset.max_num, None)
+        self.assertEqual(formset.max_num, DEFAULT_MAX_NUM)
         self.assertEqual(formset.can_order, False)
 
         # Create a formset with custom keyword arguments
diff --git a/lib/django-1.4/tests/regressiontests/initial_sql_regress/sql/simple.sql b/lib/django-1.4/tests/regressiontests/initial_sql_regress/sql/simple.sql
index ca9bd40..254abf2 100644
--- a/lib/django-1.4/tests/regressiontests/initial_sql_regress/sql/simple.sql
+++ b/lib/django-1.4/tests/regressiontests/initial_sql_regress/sql/simple.sql
@@ -4,5 +4,5 @@
 INSERT INTO initial_sql_regress_simple (name) VALUES ('George');
 INSERT INTO initial_sql_regress_simple (name) VALUES ('Miles O''Brien');
 INSERT INTO initial_sql_regress_simple (name) VALUES ('Semicolon;Man');
-INSERT INTO initial_sql_regress_simple (name) VALUES ('This line has a Windows line ending');

+INSERT INTO initial_sql_regress_simple (name) VALUES ('This line has a Windows line ending');
 
diff --git a/lib/django-1.4/tests/regressiontests/middleware/models.py b/lib/django-1.4/tests/regressiontests/middleware/models.py
index 71abcc5..1e14da4 100644
--- a/lib/django-1.4/tests/regressiontests/middleware/models.py
+++ b/lib/django-1.4/tests/regressiontests/middleware/models.py
@@ -1 +1,11 @@
-# models.py file for tests to run.
+from django.db import models
+
+
+class Band(models.Model):
+    name = models.CharField(max_length=100)
+
+    class Meta:
+        ordering = ('name',)
+
+    def __unicode__(self):
+        return self.name
diff --git a/lib/django-1.4/tests/regressiontests/middleware/tests.py b/lib/django-1.4/tests/regressiontests/middleware/tests.py
index 6a1896a..138ee50 100644
--- a/lib/django-1.4/tests/regressiontests/middleware/tests.py
+++ b/lib/django-1.4/tests/regressiontests/middleware/tests.py
@@ -1,4 +1,5 @@
 # -*- coding: utf-8 -*-
+from __future__ import absolute_import, with_statement
 
 import gzip
 import re
@@ -7,15 +8,20 @@
 
 from django.conf import settings
 from django.core import mail
+from django.db import (transaction, connections, DEFAULT_DB_ALIAS,
+                       IntegrityError)
 from django.http import HttpRequest
 from django.http import HttpResponse
 from django.middleware.clickjacking import XFrameOptionsMiddleware
 from django.middleware.common import CommonMiddleware
 from django.middleware.http import ConditionalGetMiddleware
 from django.middleware.gzip import GZipMiddleware
-from django.test import TestCase, RequestFactory
+from django.middleware.transaction import TransactionMiddleware
+from django.test import TransactionTestCase, TestCase, RequestFactory
 from django.test.utils import override_settings
 
+from .models import Band
+
 class CommonMiddlewareTest(TestCase):
     def setUp(self):
         self.append_slash = settings.APPEND_SLASH
@@ -613,3 +619,64 @@
 ETagGZipMiddlewareTest = override_settings(
     USE_ETAGS=True,
 )(ETagGZipMiddlewareTest)
+
+class TransactionMiddlewareTest(TransactionTestCase):
+    """
+    Test the transaction middleware.
+    """
+    def setUp(self):
+        self.request = HttpRequest()
+        self.request.META = {
+            'SERVER_NAME': 'testserver',
+            'SERVER_PORT': 80,
+        }
+        self.request.path = self.request.path_info = "/"
+        self.response = HttpResponse()
+        self.response.status_code = 200
+
+    def test_request(self):
+        TransactionMiddleware().process_request(self.request)
+        self.assertTrue(transaction.is_managed())
+
+    def test_managed_response(self):
+        transaction.enter_transaction_management()
+        transaction.managed(True)
+        Band.objects.create(name='The Beatles')
+        self.assertTrue(transaction.is_dirty())
+        TransactionMiddleware().process_response(self.request, self.response)
+        self.assertFalse(transaction.is_dirty())
+        self.assertEqual(Band.objects.count(), 1)
+
+    def test_unmanaged_response(self):
+        transaction.managed(False)
+        TransactionMiddleware().process_response(self.request, self.response)
+        self.assertFalse(transaction.is_managed())
+        self.assertFalse(transaction.is_dirty())
+
+    def test_exception(self):
+        transaction.enter_transaction_management()
+        transaction.managed(True)
+        Band.objects.create(name='The Beatles')
+        self.assertTrue(transaction.is_dirty())
+        TransactionMiddleware().process_exception(self.request, None)
+        self.assertEqual(Band.objects.count(), 0)
+        self.assertFalse(transaction.is_dirty())
+
+    def test_failing_commit(self):
+        # It is possible that connection.commit() fails. Check that
+        # TransactionMiddleware handles such cases correctly.
+        try:
+            def raise_exception():
+                raise IntegrityError()
+            connections[DEFAULT_DB_ALIAS].commit = raise_exception
+            transaction.enter_transaction_management()
+            transaction.managed(True)
+            Band.objects.create(name='The Beatles')
+            self.assertTrue(transaction.is_dirty())
+            with self.assertRaises(IntegrityError):
+                TransactionMiddleware().process_response(self.request, None)
+            self.assertEqual(Band.objects.count(), 0)
+            self.assertFalse(transaction.is_dirty())
+            self.assertFalse(transaction.is_managed())
+        finally:
+            del connections[DEFAULT_DB_ALIAS].commit
diff --git a/lib/django-1.4/tests/regressiontests/model_fields/tests.py b/lib/django-1.4/tests/regressiontests/model_fields/tests.py
index a71ea77..30f0e7d 100644
--- a/lib/django-1.4/tests/regressiontests/model_fields/tests.py
+++ b/lib/django-1.4/tests/regressiontests/model_fields/tests.py
@@ -6,8 +6,15 @@
 from django import test
 from django import forms
 from django.core.exceptions import ValidationError
+from django.db.models.fields import (
+    AutoField, BigIntegerField, BooleanField, CharField,
+    CommaSeparatedIntegerField, DateField, DateTimeField, DecimalField,
+    EmailField, FilePathField, FloatField, IntegerField, IPAddressField,
+    GenericIPAddressField, NullBooleanField, PositiveIntegerField,
+    PositiveSmallIntegerField, SlugField, SmallIntegerField, TextField,
+    TimeField, URLField)
 from django.db import models
-from django.db.models.fields.files import FieldFile
+from django.db.models.fields.files import FileField, ImageField, FieldFile
 from django.utils import unittest
 
 from .models import (Foo, Bar, Whiz, BigD, BigS, Image, BigInt, Post,
@@ -373,3 +380,88 @@
         field = d._meta.get_field('myfile')
         field.save_form_data(d, 'else.txt')
         self.assertEqual(d.myfile, 'else.txt')
+
+
+class PrepValueTest(test.TestCase):
+    def test_AutoField(self):
+        self.assertIsInstance(AutoField(primary_key=True).get_prep_value(1), int)
+
+    def test_BigIntegerField(self):
+        self.assertIsInstance(BigIntegerField().get_prep_value(long(9999999999999999999)), long)
+
+    def test_BooleanField(self):
+        self.assertIsInstance(BooleanField().get_prep_value(True), bool)
+
+    def test_CharField(self):
+        self.assertIsInstance(CharField().get_prep_value(''), str)
+        self.assertIsInstance(CharField().get_prep_value(0), unicode)
+
+    def test_CommaSeparatedIntegerField(self):
+        self.assertIsInstance(CommaSeparatedIntegerField().get_prep_value('1,2'), str)
+        self.assertIsInstance(CommaSeparatedIntegerField().get_prep_value(0), unicode)
+
+    def test_DateField(self):
+        self.assertIsInstance(DateField().get_prep_value(datetime.date.today()), datetime.date)
+
+    def test_DateTimeField(self):
+        self.assertIsInstance(DateTimeField().get_prep_value(datetime.datetime.now()), datetime.datetime)
+
+    def test_DecimalField(self):
+        self.assertIsInstance(DecimalField().get_prep_value(Decimal('1.2')), Decimal)
+
+    def test_EmailField(self):
+        self.assertIsInstance(EmailField().get_prep_value('mailbox@domain.com'), str)
+
+    def test_FileField(self):
+        self.assertIsInstance(FileField().get_prep_value('filename.ext'), unicode)
+        self.assertIsInstance(FileField().get_prep_value(0), unicode)
+
+    def test_FilePathField(self):
+        self.assertIsInstance(FilePathField().get_prep_value('tests.py'), unicode)
+        self.assertIsInstance(FilePathField().get_prep_value(0), unicode)
+
+    def test_FloatField(self):
+        self.assertIsInstance(FloatField().get_prep_value(1.2), float)
+
+    def test_ImageField(self):
+        self.assertIsInstance(ImageField().get_prep_value('filename.ext'), unicode)
+
+    def test_IntegerField(self):
+        self.assertIsInstance(IntegerField().get_prep_value(1), int)
+
+    def test_IPAddressField(self):
+        self.assertIsInstance(IPAddressField().get_prep_value('127.0.0.1'), unicode)
+        self.assertIsInstance(IPAddressField().get_prep_value(0), unicode)
+
+    def test_GenericIPAddressField(self):
+        self.assertIsInstance(GenericIPAddressField().get_prep_value('127.0.0.1'), unicode)
+        self.assertIsInstance(GenericIPAddressField().get_prep_value(0), unicode)
+
+    def test_NullBooleanField(self):
+        self.assertIsInstance(NullBooleanField().get_prep_value(True), bool)
+
+    def test_PositiveIntegerField(self):
+        self.assertIsInstance(PositiveIntegerField().get_prep_value(1), int)
+
+    def test_PositiveSmallIntegerField(self):
+        self.assertIsInstance(PositiveSmallIntegerField().get_prep_value(1), int)
+
+    def test_SlugField(self):
+        self.assertIsInstance(SlugField().get_prep_value('slug'), str)
+        self.assertIsInstance(SlugField().get_prep_value(0), unicode)
+
+    def test_SmallIntegerField(self):
+        self.assertIsInstance(SmallIntegerField().get_prep_value(1), int)
+
+    def test_TextField(self):
+        self.assertIsInstance(TextField().get_prep_value('Abc'), str)
+        self.assertIsInstance(TextField().get_prep_value(0), unicode)
+
+    def test_TimeField(self):
+        self.assertIsInstance(
+            TimeField().get_prep_value(datetime.datetime.now().time()),
+            datetime.time)
+
+    def test_URLField(self):
+        self.assertIsInstance(URLField().get_prep_value('http://domain.com'), str)
+
diff --git a/lib/django-1.4/tests/regressiontests/requests/tests.py b/lib/django-1.4/tests/regressiontests/requests/tests.py
index caa25ae..2c9873c 100644
--- a/lib/django-1.4/tests/regressiontests/requests/tests.py
+++ b/lib/django-1.4/tests/regressiontests/requests/tests.py
@@ -6,12 +6,15 @@
 from datetime import datetime, timedelta
 from StringIO import StringIO
 
+from django.db import connection, connections, DEFAULT_DB_ALIAS
+from django.core import signals
 from django.conf import settings
 from django.core.handlers.modpython import ModPythonRequest
 from django.core.exceptions import SuspiciousOperation
 from django.core.handlers.wsgi import WSGIRequest, LimitedStream
 from django.http import HttpRequest, HttpResponse, parse_cookie, build_request_repr, UnreadablePostError
-from django.test.utils import get_warnings_state, restore_warnings_state
+from django.test import TransactionTestCase
+from django.test.utils import get_warnings_state, restore_warnings_state, override_settings
 from django.utils import unittest
 from django.utils.http import cookie_date
 from django.utils.timezone import utc
@@ -106,161 +109,168 @@
         self.assertEqual(request.build_absolute_uri(location="/path/with:colons"),
             'http://www.example.com/path/with:colons')
 
+    @override_settings(
+        USE_X_FORWARDED_HOST=False,
+        ALLOWED_HOSTS=[
+            'forward.com', 'example.com', 'internal.com', '12.34.56.78',
+            '[2001:19f0:feee::dead:beef:cafe]', 'xn--4ca9at.com',
+            '.multitenant.com', 'INSENSITIVE.com',
+            ])
     def test_http_get_host(self):
-        old_USE_X_FORWARDED_HOST = settings.USE_X_FORWARDED_HOST
-        try:
-            settings.USE_X_FORWARDED_HOST = False
+        # Check if X_FORWARDED_HOST is provided.
+        request = HttpRequest()
+        request.META = {
+            'HTTP_X_FORWARDED_HOST': 'forward.com',
+            'HTTP_HOST': 'example.com',
+            'SERVER_NAME': 'internal.com',
+            'SERVER_PORT': 80,
+        }
+        # X_FORWARDED_HOST is ignored.
+        self.assertEqual(request.get_host(), 'example.com')
 
-            # Check if X_FORWARDED_HOST is provided.
+        # Check if X_FORWARDED_HOST isn't provided.
+        request = HttpRequest()
+        request.META = {
+            'HTTP_HOST': 'example.com',
+            'SERVER_NAME': 'internal.com',
+            'SERVER_PORT': 80,
+        }
+        self.assertEqual(request.get_host(), 'example.com')
+
+        # Check if HTTP_HOST isn't provided.
+        request = HttpRequest()
+        request.META = {
+            'SERVER_NAME': 'internal.com',
+            'SERVER_PORT': 80,
+        }
+        self.assertEqual(request.get_host(), 'internal.com')
+
+        # Check if HTTP_HOST isn't provided, and we're on a nonstandard port
+        request = HttpRequest()
+        request.META = {
+            'SERVER_NAME': 'internal.com',
+            'SERVER_PORT': 8042,
+        }
+        self.assertEqual(request.get_host(), 'internal.com:8042')
+
+        # Poisoned host headers are rejected as suspicious
+        legit_hosts = [
+            'example.com',
+            'example.com:80',
+            '12.34.56.78',
+            '12.34.56.78:443',
+            '[2001:19f0:feee::dead:beef:cafe]',
+            '[2001:19f0:feee::dead:beef:cafe]:8080',
+            'xn--4ca9at.com', # Punycode for öäü.com
+            'anything.multitenant.com',
+            'multitenant.com',
+            'insensitive.com',
+        ]
+
+        poisoned_hosts = [
+            'example.com@evil.tld',
+            'example.com:dr.frankenstein@evil.tld',
+            'example.com:dr.frankenstein@evil.tld:80',
+            'example.com:80/badpath',
+            'example.com: recovermypassword.com',
+            'other.com', # not in ALLOWED_HOSTS
+        ]
+
+        for host in legit_hosts:
             request = HttpRequest()
             request.META = {
-                u'HTTP_X_FORWARDED_HOST': u'forward.com',
-                u'HTTP_HOST': u'example.com',
-                u'SERVER_NAME': u'internal.com',
-                u'SERVER_PORT': 80,
+                'HTTP_HOST': host,
             }
-            # X_FORWARDED_HOST is ignored.
-            self.assertEqual(request.get_host(), 'example.com')
+            request.get_host()
 
-            # Check if X_FORWARDED_HOST isn't provided.
-            request = HttpRequest()
-            request.META = {
-                u'HTTP_HOST': u'example.com',
-                u'SERVER_NAME': u'internal.com',
-                u'SERVER_PORT': 80,
-            }
-            self.assertEqual(request.get_host(), 'example.com')
-
-            # Check if HTTP_HOST isn't provided.
-            request = HttpRequest()
-            request.META = {
-                u'SERVER_NAME': u'internal.com',
-                u'SERVER_PORT': 80,
-            }
-            self.assertEqual(request.get_host(), 'internal.com')
-
-            # Check if HTTP_HOST isn't provided, and we're on a nonstandard port
-            request = HttpRequest()
-            request.META = {
-                u'SERVER_NAME': u'internal.com',
-                u'SERVER_PORT': 8042,
-            }
-            self.assertEqual(request.get_host(), 'internal.com:8042')
-
-            # Poisoned host headers are rejected as suspicious
-            legit_hosts = [
-                'example.com',
-                'example.com:80',
-                '12.34.56.78',
-                '12.34.56.78:443',
-                '[2001:19f0:feee::dead:beef:cafe]',
-                '[2001:19f0:feee::dead:beef:cafe]:8080',
-                'xn--4ca9at.com', # Punnycode for öäü.com
-            ]
-
-            poisoned_hosts = [
-                'example.com@evil.tld',
-                'example.com:dr.frankenstein@evil.tld',
-                'example.com:dr.frankenstein@evil.tld:80',
-                'example.com:80/badpath',
-                'example.com: recovermypassword.com',
-            ]
-
-            for host in legit_hosts:
+        for host in poisoned_hosts:
+            with self.assertRaises(SuspiciousOperation):
                 request = HttpRequest()
                 request.META = {
                     'HTTP_HOST': host,
                 }
                 request.get_host()
 
-            for host in poisoned_hosts:
-                with self.assertRaises(SuspiciousOperation):
-                    request = HttpRequest()
-                    request.META = {
-                        'HTTP_HOST': host,
-                    }
-                    request.get_host()
-
-        finally:
-            settings.USE_X_FORWARDED_HOST = old_USE_X_FORWARDED_HOST
-
+    @override_settings(USE_X_FORWARDED_HOST=True, ALLOWED_HOSTS=['*'])
     def test_http_get_host_with_x_forwarded_host(self):
-        old_USE_X_FORWARDED_HOST = settings.USE_X_FORWARDED_HOST
-        try:
-            settings.USE_X_FORWARDED_HOST = True
+        # Check if X_FORWARDED_HOST is provided.
+        request = HttpRequest()
+        request.META = {
+            'HTTP_X_FORWARDED_HOST': 'forward.com',
+            'HTTP_HOST': 'example.com',
+            'SERVER_NAME': 'internal.com',
+            'SERVER_PORT': 80,
+        }
+        # X_FORWARDED_HOST is obeyed.
+        self.assertEqual(request.get_host(), 'forward.com')
 
-            # Check if X_FORWARDED_HOST is provided.
+        # Check if X_FORWARDED_HOST isn't provided.
+        request = HttpRequest()
+        request.META = {
+            'HTTP_HOST': 'example.com',
+            'SERVER_NAME': 'internal.com',
+            'SERVER_PORT': 80,
+        }
+        self.assertEqual(request.get_host(), 'example.com')
+
+        # Check if HTTP_HOST isn't provided.
+        request = HttpRequest()
+        request.META = {
+            'SERVER_NAME': 'internal.com',
+            'SERVER_PORT': 80,
+        }
+        self.assertEqual(request.get_host(), 'internal.com')
+
+        # Check if HTTP_HOST isn't provided, and we're on a nonstandard port
+        request = HttpRequest()
+        request.META = {
+            'SERVER_NAME': 'internal.com',
+            'SERVER_PORT': 8042,
+        }
+        self.assertEqual(request.get_host(), 'internal.com:8042')
+
+        # Poisoned host headers are rejected as suspicious
+        legit_hosts = [
+            'example.com',
+            'example.com:80',
+            '12.34.56.78',
+            '12.34.56.78:443',
+            '[2001:19f0:feee::dead:beef:cafe]',
+            '[2001:19f0:feee::dead:beef:cafe]:8080',
+            'xn--4ca9at.com', # Punycode for öäü.com
+        ]
+
+        poisoned_hosts = [
+            'example.com@evil.tld',
+            'example.com:dr.frankenstein@evil.tld',
+            'example.com:dr.frankenstein@evil.tld:80',
+            'example.com:80/badpath',
+            'example.com: recovermypassword.com',
+        ]
+
+        for host in legit_hosts:
             request = HttpRequest()
             request.META = {
-                u'HTTP_X_FORWARDED_HOST': u'forward.com',
-                u'HTTP_HOST': u'example.com',
-                u'SERVER_NAME': u'internal.com',
-                u'SERVER_PORT': 80,
+                'HTTP_HOST': host,
             }
-            # X_FORWARDED_HOST is obeyed.
-            self.assertEqual(request.get_host(), 'forward.com')
+            request.get_host()
 
-            # Check if X_FORWARDED_HOST isn't provided.
-            request = HttpRequest()
-            request.META = {
-                u'HTTP_HOST': u'example.com',
-                u'SERVER_NAME': u'internal.com',
-                u'SERVER_PORT': 80,
-            }
-            self.assertEqual(request.get_host(), 'example.com')
-
-            # Check if HTTP_HOST isn't provided.
-            request = HttpRequest()
-            request.META = {
-                u'SERVER_NAME': u'internal.com',
-                u'SERVER_PORT': 80,
-            }
-            self.assertEqual(request.get_host(), 'internal.com')
-
-            # Check if HTTP_HOST isn't provided, and we're on a nonstandard port
-            request = HttpRequest()
-            request.META = {
-                u'SERVER_NAME': u'internal.com',
-                u'SERVER_PORT': 8042,
-            }
-            self.assertEqual(request.get_host(), 'internal.com:8042')
-
-            # Poisoned host headers are rejected as suspicious
-            legit_hosts = [
-                'example.com',
-                'example.com:80',
-                '12.34.56.78',
-                '12.34.56.78:443',
-                '[2001:19f0:feee::dead:beef:cafe]',
-                '[2001:19f0:feee::dead:beef:cafe]:8080',
-                'xn--4ca9at.com', # Punnycode for öäü.com
-            ]
-
-            poisoned_hosts = [
-                'example.com@evil.tld',
-                'example.com:dr.frankenstein@evil.tld',
-                'example.com:dr.frankenstein@evil.tld:80',
-                'example.com:80/badpath',
-                'example.com: recovermypassword.com',
-            ]
-
-            for host in legit_hosts:
+        for host in poisoned_hosts:
+            with self.assertRaises(SuspiciousOperation):
                 request = HttpRequest()
                 request.META = {
                     'HTTP_HOST': host,
                 }
                 request.get_host()
 
-            for host in poisoned_hosts:
-                with self.assertRaises(SuspiciousOperation):
-                    request = HttpRequest()
-                    request.META = {
-                        'HTTP_HOST': host,
-                    }
-                    request.get_host()
-
-        finally:
-            settings.USE_X_FORWARDED_HOST = old_USE_X_FORWARDED_HOST
+    @override_settings(DEBUG=True, ALLOWED_HOSTS=[])
+    def test_host_validation_disabled_in_debug_mode(self):
+        """If ALLOWED_HOSTS is empty and DEBUG is True, all hosts pass."""
+        request = HttpRequest()
+        request.META = {
+            'HTTP_HOST': 'example.com',
+        }
+        self.assertEqual(request.get_host(), 'example.com')
 
     def test_near_expiration(self):
         "Cookie will expire when an near expiration time is provided"
@@ -530,3 +540,40 @@
 
         with self.assertRaises(UnreadablePostError):
             request.raw_post_data
+
+class TransactionRequestTests(TransactionTestCase):
+    def test_request_finished_db_state(self):
+        # Make sure there is an open connection
+        connection.cursor()
+        connection.enter_transaction_management()
+        connection.managed(True)
+        signals.request_finished.send(sender=self.__class__)
+        # In-memory sqlite doesn't actually close connections.
+        if connection.vendor != 'sqlite':
+            self.assertIs(connection.connection, None)
+        self.assertEqual(len(connection.transaction_state), 0)
+
+    @unittest.skipIf(connection.vendor == 'sqlite',
+                     'This test will close the connection, in-memory '
+                     'sqlite connections must not be closed.')
+    def test_request_finished_failed_connection(self):
+        conn = connections[DEFAULT_DB_ALIAS]
+        conn.enter_transaction_management()
+        conn.managed(True)
+        conn.set_dirty()
+        # Test that the rollback doesn't succeed (for example network failure
+        # could cause this).
+        def fail_horribly():
+            raise Exception("Horrible failure!")
+        conn._rollback = fail_horribly
+        try:
+            with self.assertRaises(Exception):
+                signals.request_finished.send(sender=self.__class__)
+            # The connection's state wasn't cleaned up
+            self.assertTrue(len(connection.transaction_state), 1)
+        finally:
+            del conn._rollback
+        # The connection will be cleaned on next request where the conn
+        # works again.
+        signals.request_finished.send(sender=self.__class__)
+        self.assertEqual(len(connection.transaction_state), 0)
diff --git a/lib/django-1.4/tests/regressiontests/serializers_regress/tests.py b/lib/django-1.4/tests/regressiontests/serializers_regress/tests.py
index 65194da..ac1d9da 100644
--- a/lib/django-1.4/tests/regressiontests/serializers_regress/tests.py
+++ b/lib/django-1.4/tests/regressiontests/serializers_regress/tests.py
@@ -16,6 +16,7 @@
     from cStringIO import StringIO
 except ImportError:
     from StringIO import StringIO
+from django.core.serializers.xml_serializer import DTDForbidden
 
 try:
     import yaml
@@ -523,3 +524,16 @@
     if format != 'python':
         setattr(SerializerTests, 'test_' + format + '_serializer_stream', curry(streamTest, format))
 
+
+class XmlDeserializerSecurityTests(TestCase):
+
+    def test_no_dtd(self):
+        """
+        The XML deserializer shouldn't allow a DTD.
+
+        This is the most straightforward way to prevent all entity definitions
+        and avoid both external entities and entity-expansion attacks.
+
+        """
+        xml = '<?xml version="1.0" standalone="no"?><!DOCTYPE example SYSTEM "http://example.com/example.dtd">'
+        self.assertRaises(DTDForbidden, serializers.deserialize('xml', xml).next)
diff --git a/lib/django-1.4/tests/regressiontests/templates/tests.py b/lib/django-1.4/tests/regressiontests/templates/tests.py
index f74aa75..db19636 100644
--- a/lib/django-1.4/tests/regressiontests/templates/tests.py
+++ b/lib/django-1.4/tests/regressiontests/templates/tests.py
@@ -1764,3 +1764,37 @@
             template.Template('{% include "child" only %}').render(ctx),
             'none'
         )
+
+skip_reason = "The {%% ssi %%} tag in Django 1.4 doesn't support spaces in path."
+class SSITests(unittest.TestCase):
+    def setUp(self):
+        self.this_dir = os.path.dirname(os.path.abspath(__file__))
+        self.ssi_dir = os.path.join(self.this_dir, "templates", "first")
+
+    def render_ssi(self, path):
+        # the path must exist for the test to be reliable
+        self.assertTrue(os.path.exists(path))
+        return template.Template('{%% ssi %s %%}' % path).render(Context())
+
+    @unittest.skipIf(' ' in __file__, skip_reason)
+    def test_allowed_paths(self):
+        acceptable_path = os.path.join(self.ssi_dir, "..", "first", "test.html")
+        with override_settings(ALLOWED_INCLUDE_ROOTS=(self.ssi_dir,)):
+            self.assertEqual(self.render_ssi(acceptable_path), 'First template\n')
+
+    @unittest.skipIf(' ' in __file__, skip_reason)
+    def test_relative_include_exploit(self):
+        """
+        May not bypass ALLOWED_INCLUDE_ROOTS with relative paths
+
+        e.g. if ALLOWED_INCLUDE_ROOTS = ("/var/www",), it should not be
+        possible to do {% ssi "/var/www/../../etc/passwd" %}
+        """
+        disallowed_paths = [
+            os.path.join(self.ssi_dir, "..", "ssi_include.html"),
+            os.path.join(self.ssi_dir, "..", "second", "test.html"),
+        ]
+        with override_settings(ALLOWED_INCLUDE_ROOTS=(self.ssi_dir,)):
+            for path in disallowed_paths:
+                self.assertIn(self.render_ssi(path),
+                              ['', "[Didn't have permission to include file]"])
diff --git a/lib/django-1.4/tests/regressiontests/test_runner/tests.py b/lib/django-1.4/tests/regressiontests/test_runner/tests.py
index 22e9fe6..7f54522 100644
--- a/lib/django-1.4/tests/regressiontests/test_runner/tests.py
+++ b/lib/django-1.4/tests/regressiontests/test_runner/tests.py
@@ -10,7 +10,7 @@
 from django.core.exceptions import ImproperlyConfigured
 from django.core.management import call_command
 from django import db
-from django.test import simple
+from django.test import simple, skipIfDBFeature
 from django.test.simple import DjangoTestSuiteRunner, get_tests
 from django.test.testcases import connections_support_transactions
 from django.test.utils import get_warnings_state, restore_warnings_state
@@ -217,6 +217,13 @@
 
 
 class Ticket16885RegressionTests(unittest.TestCase):
+    
+    # Skipped if empty strings are nulls because this feature causes
+    # database setup to fail on model validation for models defined
+    # with string PKs (such models are already in the AppCache), while
+    # the test cares neither about models nor about the database backend
+    # from settings.
+    @skipIfDBFeature('interprets_empty_strings_as_nulls')
     def test_ticket_16885(self):
         """Features are also confirmed on mirrored databases."""
         old_db_connections = db.connections
diff --git a/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/nonimported_module.py b/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/nonimported_module.py
new file mode 100644
index 0000000..df04633
--- /dev/null
+++ b/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/nonimported_module.py
@@ -0,0 +1,3 @@
+def view(request):
+    """Stub view"""
+    pass
diff --git a/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/tests.py b/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/tests.py
index a1c9244..0ea732b 100644
--- a/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/tests.py
+++ b/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/tests.py
@@ -1,8 +1,11 @@
+# -*- coding: utf-8 -*-
 """
 Unit tests for reverse URL lookups.
 """
 from __future__ import absolute_import
 
+import sys
+
 from django.conf import settings
 from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
 from django.core.urlresolvers import (reverse, resolve, NoReverseMatch,
@@ -267,6 +270,25 @@
         self.assertEqual(res['Location'], '/foo/')
         res = redirect('http://example.com/')
         self.assertEqual(res['Location'], 'http://example.com/')
+        # Assert that we can redirect using UTF-8 strings
+        res = redirect('/æøå/abc/')
+        self.assertEqual(res['Location'], '/%C3%A6%C3%B8%C3%A5/abc/')
+        # Assert that no imports are attempted when dealing with a relative path
+        # (previously, the below would result in a UnicodeEncodeError from __import__)
+        res = redirect('/æøå.abc/')
+        self.assertEqual(res['Location'], '/%C3%A6%C3%B8%C3%A5.abc/')
+        res = redirect('os.path')
+        self.assertEqual(res['Location'], 'os.path')
+
+    def test_no_illegal_imports(self):
+        # modules that are not listed in urlpatterns should not be importable
+        redirect("urlpatterns_reverse.nonimported_module.view")
+        self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
+
+    def test_reverse_by_path_nested(self):
+        # Views that are added to urlpatterns using include() should be
+        # reversible by dotted path.
+        self.assertEqual(reverse('regressiontests.urlpatterns_reverse.views.nested_view'), '/includes/nested_path/')
 
     def test_redirect_view_object(self):
         from .views import absolute_kwargs_view
@@ -510,4 +532,3 @@
         self.assertRaises(ViewDoesNotExist, self.client.get, '/missing_inner/')
         self.assertRaises(ViewDoesNotExist, self.client.get, '/missing_outer/')
         self.assertRaises(ViewDoesNotExist, self.client.get, '/uncallable/')
-
diff --git a/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/urls.py b/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/urls.py
index 1d4ae73..7aae7c4 100644
--- a/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/urls.py
+++ b/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/urls.py
@@ -2,11 +2,12 @@
 
 from django.conf.urls import patterns, url, include
 
-from .views import empty_view, absolute_kwargs_view
+from .views import empty_view, empty_view_partial, empty_view_wrapped, absolute_kwargs_view
 
 
 other_patterns = patterns('',
     url(r'non_path_include/$', empty_view, name='non_path_include'),
+    url(r'nested_path/$', 'regressiontests.urlpatterns_reverse.views.nested_view'),
 )
 
 urlpatterns = patterns('',
@@ -55,6 +56,10 @@
     # This is non-reversible, but we shouldn't blow up when parsing it.
     url(r'^(?:foo|bar)(\w+)/$', empty_view, name="disjunction"),
 
+    # Partials should be fine.
+    url(r'^partial/', empty_view_partial, name="partial"),
+    url(r'^partial_wrapped/', empty_view_wrapped, name="partial_wrapped"),
+
     # Regression views for #9038. See tests for more details
     url(r'arg_view/$', 'kwargs_view'),
     url(r'arg_view/(?P<arg1>\d+)/$', 'kwargs_view'),
diff --git a/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/views.py b/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/views.py
index f631acf..de09552 100644
--- a/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/views.py
+++ b/lib/django-1.4/tests/regressiontests/urlpatterns_reverse/views.py
@@ -1,3 +1,5 @@
+from functools import partial, update_wrapper
+
 from django.http import HttpResponse
 from django.views.generic import RedirectView
 from django.core.urlresolvers import reverse_lazy
@@ -16,6 +18,10 @@
 def defaults_view(request, arg1, arg2):
     pass
 
+def nested_view(request):
+    pass
+
+
 def erroneous_view(request):
     import non_existent
 
@@ -36,3 +42,11 @@
 
 def bad_view(request, *args, **kwargs):
     raise ValueError("I don't think I'm getting good value for this view")
+
+
+empty_view_partial = partial(empty_view, template_name="template.html")
+
+
+empty_view_wrapped = update_wrapper(
+    partial(empty_view, template_name="template.html"), empty_view,
+)
diff --git a/lib/django-1.4/tests/regressiontests/utils/http.py b/lib/django-1.4/tests/regressiontests/utils/http.py
index 16c7daa..802b3fa 100644
--- a/lib/django-1.4/tests/regressiontests/utils/http.py
+++ b/lib/django-1.4/tests/regressiontests/utils/http.py
@@ -56,50 +56,6 @@
         ]
         self.assertTrue(result in acceptable_results)
 
-    def test_fix_IE_for_vary(self):
-        """
-        Regression for #16632.
-
-        `fix_IE_for_vary` shouldn't crash when there's no Content-Type header.
-        """
-
-        # functions to generate responses
-        def response_with_unsafe_content_type():
-            r = HttpResponse(content_type="text/unsafe")
-            r['Vary'] = 'Cookie'
-            return r
-
-        def no_content_response_with_unsafe_content_type():
-            # 'Content-Type' always defaulted, so delete it
-            r = response_with_unsafe_content_type()
-            del r['Content-Type']
-            return r
-
-        # request with & without IE user agent
-        rf = RequestFactory()
-        request = rf.get('/')
-        ie_request = rf.get('/', HTTP_USER_AGENT='MSIE')
-
-        # not IE, unsafe_content_type
-        response = response_with_unsafe_content_type()
-        utils.fix_IE_for_vary(request, response)
-        self.assertTrue('Vary' in response)
-
-        # IE, unsafe_content_type
-        response = response_with_unsafe_content_type()
-        utils.fix_IE_for_vary(ie_request, response)
-        self.assertFalse('Vary' in response)
-
-        # not IE, no_content
-        response = no_content_response_with_unsafe_content_type()
-        utils.fix_IE_for_vary(request, response)
-        self.assertTrue('Vary' in response)
-
-        # IE, no_content
-        response = no_content_response_with_unsafe_content_type()
-        utils.fix_IE_for_vary(ie_request, response)
-        self.assertFalse('Vary' in response)
-
     def test_base36(self):
         # reciprocity works
         for n in [0, 1, 1000, 1000000, sys.maxint]:
@@ -122,3 +78,33 @@
         for n, b36 in [(0, '0'), (1, '1'), (42, '16'), (818469960, 'django')]:
             self.assertEqual(http.int_to_base36(n), b36)
             self.assertEqual(http.base36_to_int(b36), n)
+
+    def test_is_safe_url(self):
+        for bad_url in ('http://example.com',
+                        'http:///example.com',
+                        'https://example.com',
+                        'ftp://exampel.com',
+                        r'\\example.com',
+                        r'\\\example.com',
+                        r'/\\/example.com',
+                        r'\\\example.com',
+                        r'\\example.com',
+                        r'\\//example.com',
+                        r'/\/example.com',
+                        r'\/example.com',
+                        r'/\example.com',
+                        'http:///example.com',
+                        'http:/\//example.com',
+                        'http:\/example.com',
+                        'http:/\example.com',
+                        'javascript:alert("XSS")'):
+            self.assertFalse(http.is_safe_url(bad_url, host='testserver'), "%s should be blocked" % bad_url)
+        for good_url in ('/view/?param=http://example.com',
+                     '/view/?param=https://example.com',
+                     '/view?param=ftp://exampel.com',
+                     'view/?param=//example.com',
+                     'https://testserver/',
+                     'HTTPS://testserver/',
+                     '//testserver/',
+                     '/url%20with%20spaces/'):
+            self.assertTrue(http.is_safe_url(good_url, host='testserver'), "%s should be allowed" % good_url)
diff --git a/lib/django-1.4/tests/requirements/base.txt b/lib/django-1.4/tests/requirements/base.txt
new file mode 100644
index 0000000..c89533b
--- /dev/null
+++ b/lib/django-1.4/tests/requirements/base.txt
@@ -0,0 +1,10 @@
+docutils
+Markdown
+numpy
+PIL
+py-bcrypt
+python-memcached
+pytz
+PyYAML
+selenium
+Textile
diff --git a/lib/django-1.4/tests/requirements/mysql.txt b/lib/django-1.4/tests/requirements/mysql.txt
new file mode 100644
index 0000000..c7a2347
--- /dev/null
+++ b/lib/django-1.4/tests/requirements/mysql.txt
@@ -0,0 +1 @@
+MySQL-python
diff --git a/lib/django-1.4/tests/requirements/oracle.txt b/lib/django-1.4/tests/requirements/oracle.txt
new file mode 100644
index 0000000..ae5b734
--- /dev/null
+++ b/lib/django-1.4/tests/requirements/oracle.txt
@@ -0,0 +1 @@
+cx_oracle
diff --git a/lib/django-1.4/tests/requirements/postgres.txt b/lib/django-1.4/tests/requirements/postgres.txt
new file mode 100644
index 0000000..658130b
--- /dev/null
+++ b/lib/django-1.4/tests/requirements/postgres.txt
@@ -0,0 +1 @@
+psycopg2
diff --git a/lib/django-1.5/AUTHORS b/lib/django-1.5/AUTHORS
index 357533d..7ad0938 100644
--- a/lib/django-1.5/AUTHORS
+++ b/lib/django-1.5/AUTHORS
@@ -236,6 +236,7 @@
     Janos Guljas
     Thomas Güttler <hv@tbz-pariv.de>
     Horst Gutmann <zerok@zerokspot.com>
+    Bouke Haarsma <bouke@haarsma.eu>
     Antti Haapala <antti@industrialwebandmagic.com>
     Scot Hacker <shacker@birdhouse.org>
     dAniel hAhler
diff --git a/lib/django-1.5/PKG-INFO b/lib/django-1.5/PKG-INFO
index 5b0db4f..4af90bf 100644
--- a/lib/django-1.5/PKG-INFO
+++ b/lib/django-1.5/PKG-INFO
@@ -1,12 +1,12 @@
 Metadata-Version: 1.1
 Name: Django
-Version: 1.5.4
+Version: 1.5.8
 Summary: A high-level Python Web framework that encourages rapid development and clean, pragmatic design.
 Home-page: http://www.djangoproject.com/
 Author: Django Software Foundation
 Author-email: foundation@djangoproject.com
 License: BSD
-Download-URL: https://www.djangoproject.com/m/releases/1.5/Django-1.5.4.tar.gz
+Download-URL: https://www.djangoproject.com/m/releases/1.5/Django-1.5.8.tar.gz
 Description: UNKNOWN
 Platform: UNKNOWN
 Classifier: Development Status :: 5 - Production/Stable
diff --git a/lib/django-1.5/django/__init__.py b/lib/django-1.5/django/__init__.py
index 00166a4..2415914 100644
--- a/lib/django-1.5/django/__init__.py
+++ b/lib/django-1.5/django/__init__.py
@@ -1,4 +1,4 @@
-VERSION = (1, 5, 4, 'final', 0)
+VERSION = (1, 5, 8, 'final', 0)
 
 def get_version(*args, **kwargs):
     # Don't litter django/__init__.py with all the get_version stuff.
diff --git a/lib/django-1.5/django/contrib/admin/tests.py b/lib/django-1.5/django/contrib/admin/tests.py
index 78e68e7..f529fae 100644
--- a/lib/django-1.5/django/contrib/admin/tests.py
+++ b/lib/django-1.5/django/contrib/admin/tests.py
@@ -17,13 +17,14 @@
         except Exception as e:
             raise SkipTest('Selenium webdriver "%s" not installed or not '
                            'operational: %s' % (cls.webdriver_class, str(e)))
+        # This has to be last to ensure that resources are cleaned up properly!
         super(AdminSeleniumWebDriverTestCase, cls).setUpClass()
 
     @classmethod
-    def tearDownClass(cls):
+    def _tearDownClassInternal(cls):
         if hasattr(cls, 'selenium'):
             cls.selenium.quit()
-        super(AdminSeleniumWebDriverTestCase, cls).tearDownClass()
+        super(AdminSeleniumWebDriverTestCase, cls)._tearDownClassInternal()
 
     def wait_until(self, callback, timeout=10):
         """
diff --git a/lib/django-1.5/django/contrib/auth/backends.py b/lib/django-1.5/django/contrib/auth/backends.py
index 6b31f72..aca270b 100644
--- a/lib/django-1.5/django/contrib/auth/backends.py
+++ b/lib/django-1.5/django/contrib/auth/backends.py
@@ -62,8 +62,8 @@
         return False
 
     def get_user(self, user_id):
+        UserModel = get_user_model()
         try:
-            UserModel = get_user_model()
             return UserModel._default_manager.get(pk=user_id)
         except UserModel.DoesNotExist:
             return None
diff --git a/lib/django-1.5/django/contrib/auth/forms.py b/lib/django-1.5/django/contrib/auth/forms.py
index d191635..cbce8ad 100644
--- a/lib/django-1.5/django/contrib/auth/forms.py
+++ b/lib/django-1.5/django/contrib/auth/forms.py
@@ -12,9 +12,7 @@
 
 from django.contrib.auth import authenticate, get_user_model
 from django.contrib.auth.models import User
-from django.contrib.auth.hashers import (
-    MAXIMUM_PASSWORD_LENGTH, UNUSABLE_PASSWORD, identify_hasher,
-)
+from django.contrib.auth.hashers import UNUSABLE_PASSWORD, identify_hasher
 from django.contrib.auth.tokens import default_token_generator
 from django.contrib.sites.models import get_current_site
 
@@ -77,10 +75,9 @@
             'invalid': _("This value may contain only letters, numbers and "
                          "@/./+/-/_ characters.")})
     password1 = forms.CharField(label=_("Password"),
-        widget=forms.PasswordInput, max_length=MAXIMUM_PASSWORD_LENGTH)
+        widget=forms.PasswordInput)
     password2 = forms.CharField(label=_("Password confirmation"),
         widget=forms.PasswordInput,
-        max_length=MAXIMUM_PASSWORD_LENGTH,
         help_text=_("Enter the same password as above, for verification."))
 
     class Meta:
@@ -148,11 +145,7 @@
     username/password logins.
     """
     username = forms.CharField(max_length=254)
-    password = forms.CharField(
-        label=_("Password"),
-        widget=forms.PasswordInput,
-        max_length=MAXIMUM_PASSWORD_LENGTH,
-    )
+    password = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
 
     error_messages = {
         'invalid_login': _("Please enter a correct %(username)s and password. "
@@ -276,16 +269,10 @@
     error_messages = {
         'password_mismatch': _("The two password fields didn't match."),
     }
-    new_password1 = forms.CharField(
-        label=_("New password"),
-        widget=forms.PasswordInput,
-        max_length=MAXIMUM_PASSWORD_LENGTH,
-    )
-    new_password2 = forms.CharField(
-        label=_("New password confirmation"),
-        widget=forms.PasswordInput,
-        max_length=MAXIMUM_PASSWORD_LENGTH,
-    )
+    new_password1 = forms.CharField(label=_("New password"),
+                                    widget=forms.PasswordInput)
+    new_password2 = forms.CharField(label=_("New password confirmation"),
+                                    widget=forms.PasswordInput)
 
     def __init__(self, user, *args, **kwargs):
         self.user = user
@@ -316,11 +303,8 @@
         'password_incorrect': _("Your old password was entered incorrectly. "
                                 "Please enter it again."),
     })
-    old_password = forms.CharField(
-        label=_("Old password"),
-        widget=forms.PasswordInput,
-        max_length=MAXIMUM_PASSWORD_LENGTH,
-    )
+    old_password = forms.CharField(label=_("Old password"),
+                                   widget=forms.PasswordInput)
 
     def clean_old_password(self):
         """
@@ -345,16 +329,10 @@
     error_messages = {
         'password_mismatch': _("The two password fields didn't match."),
     }
-    password1 = forms.CharField(
-        label=_("Password"),
-        widget=forms.PasswordInput,
-        max_length=MAXIMUM_PASSWORD_LENGTH,
-    )
-    password2 = forms.CharField(
-        label=_("Password (again)"),
-        widget=forms.PasswordInput,
-        max_length=MAXIMUM_PASSWORD_LENGTH,
-    )
+    password1 = forms.CharField(label=_("Password"),
+                                widget=forms.PasswordInput)
+    password2 = forms.CharField(label=_("Password (again)"),
+                                widget=forms.PasswordInput)
 
     def __init__(self, user, *args, **kwargs):
         self.user = user
diff --git a/lib/django-1.5/django/contrib/auth/hashers.py b/lib/django-1.5/django/contrib/auth/hashers.py
index a9d5d7b..b49362f 100644
--- a/lib/django-1.5/django/contrib/auth/hashers.py
+++ b/lib/django-1.5/django/contrib/auth/hashers.py
@@ -1,7 +1,6 @@
 from __future__ import unicode_literals
 
 import base64
-import functools
 import hashlib
 
 from django.dispatch import receiver
@@ -17,7 +16,6 @@
 
 
 UNUSABLE_PASSWORD = '!'  # This will never be a valid encoded hash
-MAXIMUM_PASSWORD_LENGTH = 4096  # The maximum length a password can be to prevent DoS
 HASHERS = None  # lazily loaded from PASSWORD_HASHERS
 PREFERRED_HASHER = None  # defaults to first item in PASSWORD_HASHERS
 
@@ -29,18 +27,6 @@
         PREFERRED_HASHER = None
 
 
-def password_max_length(max_length):
-    def inner(fn):
-        @functools.wraps(fn)
-        def wrapper(self, password, *args, **kwargs):
-            if len(password) > max_length:
-                raise ValueError("Invalid password; Must be less than or equal"
-                                 " to %d bytes" % max_length)
-            return fn(self, password, *args, **kwargs)
-        return wrapper
-    return inner
-
-
 def is_password_usable(encoded):
     if encoded is None or encoded == UNUSABLE_PASSWORD:
         return False
@@ -239,7 +225,6 @@
     iterations = 10000
     digest = hashlib.sha256
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def encode(self, password, salt, iterations=None):
         assert password
         assert salt and '$' not in salt
@@ -249,7 +234,6 @@
         hash = base64.b64encode(hash).decode('ascii').strip()
         return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, hash)
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def verify(self, password, encoded):
         algorithm, iterations, salt, hash = encoded.split('$', 3)
         assert algorithm == self.algorithm
@@ -295,7 +279,6 @@
         bcrypt = self._load_library()
         return bcrypt.gensalt(self.rounds)
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def encode(self, password, salt):
         bcrypt = self._load_library()
         # Need to reevaluate the force_bytes call once bcrypt is supported on
@@ -303,7 +286,6 @@
         data = bcrypt.hashpw(force_bytes(password), salt)
         return "%s$%s" % (self.algorithm, data)
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def verify(self, password, encoded):
         algorithm, data = encoded.split('$', 1)
         assert algorithm == self.algorithm
@@ -328,14 +310,12 @@
     """
     algorithm = "sha1"
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def encode(self, password, salt):
         assert password
         assert salt and '$' not in salt
         hash = hashlib.sha1(force_bytes(salt + password)).hexdigest()
         return "%s$%s$%s" % (self.algorithm, salt, hash)
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def verify(self, password, encoded):
         algorithm, salt, hash = encoded.split('$', 2)
         assert algorithm == self.algorithm
@@ -358,14 +338,12 @@
     """
     algorithm = "md5"
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def encode(self, password, salt):
         assert password
         assert salt and '$' not in salt
         hash = hashlib.md5(force_bytes(salt + password)).hexdigest()
         return "%s$%s$%s" % (self.algorithm, salt, hash)
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def verify(self, password, encoded):
         algorithm, salt, hash = encoded.split('$', 2)
         assert algorithm == self.algorithm
@@ -396,13 +374,11 @@
     def salt(self):
         return ''
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def encode(self, password, salt):
         assert salt == ''
         hash = hashlib.sha1(force_bytes(password)).hexdigest()
         return 'sha1$$%s' % hash
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def verify(self, password, encoded):
         encoded_2 = self.encode(password, '')
         return constant_time_compare(encoded, encoded_2)
@@ -432,12 +408,10 @@
     def salt(self):
         return ''
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def encode(self, password, salt):
         assert salt == ''
         return hashlib.md5(force_bytes(password)).hexdigest()
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def verify(self, password, encoded):
         if len(encoded) == 37 and encoded.startswith('md5$$'):
             encoded = encoded[5:]
@@ -463,7 +437,6 @@
     def salt(self):
         return get_random_string(2)
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def encode(self, password, salt):
         crypt = self._load_library()
         assert len(salt) == 2
@@ -471,7 +444,6 @@
         # we don't need to store the salt, but Django used to do this
         return "%s$%s$%s" % (self.algorithm, '', data)
 
-    @password_max_length(MAXIMUM_PASSWORD_LENGTH)
     def verify(self, password, encoded):
         crypt = self._load_library()
         algorithm, salt, data = encoded.split('$', 2)
@@ -486,3 +458,4 @@
             (_('salt'), salt),
             (_('hash'), mask_hash(data, show=3)),
         ])
+
diff --git a/lib/django-1.5/django/contrib/auth/tests/auth_backends.py b/lib/django-1.5/django/contrib/auth/tests/auth_backends.py
index be29d9e..0ac2bf2 100644
--- a/lib/django-1.5/django/contrib/auth/tests/auth_backends.py
+++ b/lib/django-1.5/django/contrib/auth/tests/auth_backends.py
@@ -7,7 +7,8 @@
 from django.contrib.auth.tests.custom_user import ExtensionUser, CustomPermissionsUser, CustomUser
 from django.contrib.contenttypes.models import ContentType
 from django.core.exceptions import ImproperlyConfigured
-from django.contrib.auth import authenticate
+from django.contrib.auth import authenticate, get_user
+from django.http import HttpRequest
 from django.test import TestCase
 from django.test.utils import override_settings
 
@@ -367,3 +368,27 @@
     def test_has_module_perms(self):
         self.assertEqual(self.user1.has_module_perms("app1"), False)
         self.assertEqual(self.user1.has_module_perms("app2"), False)
+
+
+@skipIfCustomUser
+class ImproperlyConfiguredUserModelTest(TestCase):
+    """
+    Tests that an exception from within get_user_model is propagated and doesn't
+    raise an UnboundLocalError.
+
+    Regression test for ticket #21439
+    """
+    def setUp(self):
+        self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
+        self.client.login(
+            username='test',
+            password='test'
+        )
+
+    @override_settings(AUTH_USER_MODEL='thismodel.doesntexist')
+    def test_does_not_shadow_exception(self):
+        # Prepare a request object
+        request = HttpRequest()
+        request.session = self.client.session
+
+        self.assertRaises(ImproperlyConfigured, get_user, request)
diff --git a/lib/django-1.5/django/contrib/auth/tests/hashers.py b/lib/django-1.5/django/contrib/auth/tests/hashers.py
index 8c35f54..be56fde 100644
--- a/lib/django-1.5/django/contrib/auth/tests/hashers.py
+++ b/lib/django-1.5/django/contrib/auth/tests/hashers.py
@@ -4,8 +4,7 @@
 from django.conf.global_settings import PASSWORD_HASHERS as default_hashers
 from django.contrib.auth.hashers import (is_password_usable,
     check_password, make_password, PBKDF2PasswordHasher, load_hashers,
-    PBKDF2SHA1PasswordHasher, get_hasher, identify_hasher, UNUSABLE_PASSWORD,
-    MAXIMUM_PASSWORD_LENGTH, password_max_length)
+    PBKDF2SHA1PasswordHasher, get_hasher, identify_hasher, UNUSABLE_PASSWORD)
 from django.utils import unittest
 from django.utils.unittest import skipUnless
 
@@ -17,6 +16,10 @@
 
 try:
     import bcrypt
+    # Django 1.5 works only with py-bcrypt, not with bcrypt. py-bcrypt has
+    # '_bcrypt' attribute, bcrypt doesn't.
+    if not hasattr(bcrypt, '_bcrypt'):
+        bcrypt = None
 except ImportError:
     bcrypt = None
 
@@ -32,12 +35,6 @@
         self.assertTrue(is_password_usable(encoded))
         self.assertTrue(check_password('lètmein', encoded))
         self.assertFalse(check_password('lètmeinz', encoded))
-        # Long password
-        self.assertRaises(
-            ValueError,
-            make_password,
-            b"1" * (MAXIMUM_PASSWORD_LENGTH + 1),
-        )
 
     def test_pkbdf2(self):
         encoded = make_password('lètmein', 'seasalt', 'pbkdf2_sha256')
@@ -47,14 +44,6 @@
         self.assertTrue(check_password('lètmein', encoded))
         self.assertFalse(check_password('lètmeinz', encoded))
         self.assertEqual(identify_hasher(encoded).algorithm, "pbkdf2_sha256")
-        # Long password
-        self.assertRaises(
-            ValueError,
-            make_password,
-            b"1" * (MAXIMUM_PASSWORD_LENGTH + 1),
-            "seasalt",
-            "pbkdf2_sha256",
-        )
 
     def test_sha1(self):
         encoded = make_password('lètmein', 'seasalt', 'sha1')
@@ -64,14 +53,6 @@
         self.assertTrue(check_password('lètmein', encoded))
         self.assertFalse(check_password('lètmeinz', encoded))
         self.assertEqual(identify_hasher(encoded).algorithm, "sha1")
-        # Long password
-        self.assertRaises(
-            ValueError,
-            make_password,
-            b"1" * (MAXIMUM_PASSWORD_LENGTH + 1),
-            "seasalt",
-            "sha1",
-        )
 
     def test_md5(self):
         encoded = make_password('lètmein', 'seasalt', 'md5')
@@ -81,14 +62,6 @@
         self.assertTrue(check_password('lètmein', encoded))
         self.assertFalse(check_password('lètmeinz', encoded))
         self.assertEqual(identify_hasher(encoded).algorithm, "md5")
-        # Long password
-        self.assertRaises(
-            ValueError,
-            make_password,
-            b"1" * (MAXIMUM_PASSWORD_LENGTH + 1),
-            "seasalt",
-            "md5",
-        )
 
     def test_unsalted_md5(self):
         encoded = make_password('lètmein', '', 'unsalted_md5')
@@ -102,14 +75,6 @@
         self.assertTrue(is_password_usable(alt_encoded))
         self.assertTrue(check_password('lètmein', alt_encoded))
         self.assertFalse(check_password('lètmeinz', alt_encoded))
-        # Long password
-        self.assertRaises(
-            ValueError,
-            make_password,
-            b"1" * (MAXIMUM_PASSWORD_LENGTH + 1),
-            "",
-            "unsalted_md5",
-        )
 
     def test_unsalted_sha1(self):
         encoded = make_password('lètmein', '', 'unsalted_sha1')
@@ -121,14 +86,6 @@
         # Raw SHA1 isn't acceptable
         alt_encoded = encoded[6:]
         self.assertFalse(check_password('lètmein', alt_encoded))
-        # Long password
-        self.assertRaises(
-            ValueError,
-            make_password,
-            b"1" * (MAXIMUM_PASSWORD_LENGTH + 1),
-            "",
-            "unslated_sha1",
-        )
 
     @skipUnless(crypt, "no crypt module to generate password.")
     def test_crypt(self):
@@ -138,14 +95,6 @@
         self.assertTrue(check_password('lètmei', encoded))
         self.assertFalse(check_password('lètmeiz', encoded))
         self.assertEqual(identify_hasher(encoded).algorithm, "crypt")
-        # Long password
-        self.assertRaises(
-            ValueError,
-            make_password,
-            b"1" * (MAXIMUM_PASSWORD_LENGTH + 1),
-            "seasalt",
-            "crypt",
-        )
 
     @skipUnless(bcrypt, "py-bcrypt not installed")
     def test_bcrypt(self):
@@ -155,13 +104,6 @@
         self.assertTrue(check_password('lètmein', encoded))
         self.assertFalse(check_password('lètmeinz', encoded))
         self.assertEqual(identify_hasher(encoded).algorithm, "bcrypt")
-        # Long password
-        self.assertRaises(
-            ValueError,
-            make_password,
-            b"1" * (MAXIMUM_PASSWORD_LENGTH + 1),
-            hasher="bcrypt",
-        )
 
     def test_unusable(self):
         encoded = make_password(None)
@@ -183,14 +125,6 @@
         self.assertFalse(is_password_usable('lètmein_badencoded'))
         self.assertFalse(is_password_usable(''))
 
-    def test_max_password_length_decorator(self):
-        @password_max_length(10)
-        def encode(s, password, salt):
-            return True
-
-        self.assertTrue(encode(None, b"1234", b"1234"))
-        self.assertRaises(ValueError, encode, None, b"1234567890A", b"1234")
-
     def test_low_level_pkbdf2(self):
         hasher = PBKDF2PasswordHasher()
         encoded = hasher.encode('lètmein', 'seasalt')
diff --git a/lib/django-1.5/django/contrib/auth/tests/views.py b/lib/django-1.5/django/contrib/auth/tests/views.py
index 754fa35..ba2b48a 100644
--- a/lib/django-1.5/django/contrib/auth/tests/views.py
+++ b/lib/django-1.5/django/contrib/auth/tests/views.py
@@ -326,8 +326,10 @@
 
         # Those URLs should not pass the security check
         for bad_url in ('http://example.com',
+                        'http:///example.com',
                         'https://example.com',
                         'ftp://exampel.com',
+                        '///example.com',
                         '//example.com',
                         'javascript:alert("XSS")'):
 
@@ -349,8 +351,8 @@
                          '/view/?param=https://example.com',
                          '/view?param=ftp://exampel.com',
                          'view/?param=//example.com',
-                         'https:///',
-                         'HTTPS:///',
+                         'https://testserver/',
+                         'HTTPS://testserver/',
                          '//testserver/',
                          '/url%20with%20spaces/'):  # see ticket #12534
             safe_url = '%(url)s?%(next)s=%(good_url)s' % {
@@ -392,7 +394,6 @@
         CsrfViewMiddleware().process_view(req, login_view, (), {})
         req.META["SERVER_NAME"] = "testserver"  # Required to have redirect work in login view
         req.META["SERVER_PORT"] = 80
-        req.META["CSRF_COOKIE_USED"] = True
         resp = login_view(req)
         resp2 = CsrfViewMiddleware().process_response(req, resp)
         csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
@@ -522,8 +523,10 @@
 
         # Those URLs should not pass the security check
         for bad_url in ('http://example.com',
+                        'http:///example.com',
                         'https://example.com',
                         'ftp://exampel.com',
+                        '///example.com',
                         '//example.com',
                         'javascript:alert("XSS")'):
             nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
@@ -543,8 +546,8 @@
                          '/view/?param=https://example.com',
                          '/view?param=ftp://exampel.com',
                          'view/?param=//example.com',
-                         'https:///',
-                         'HTTPS:///',
+                         'https://testserver/',
+                         'HTTPS://testserver/',
                          '//testserver/',
                          '/url%20with%20spaces/'):  # see ticket #12534
             safe_url = '%(url)s?%(next)s=%(good_url)s' % {
diff --git a/lib/django-1.5/django/core/handlers/base.py b/lib/django-1.5/django/core/handlers/base.py
index 7fff71a..4819ad3 100644
--- a/lib/django-1.5/django/core/handlers/base.py
+++ b/lib/django-1.5/django/core/handlers/base.py
@@ -22,8 +22,6 @@
     response_fixes = [
         http.fix_location_header,
         http.conditional_content_removal,
-        http.fix_IE_for_attach,
-        http.fix_IE_for_vary,
     ]
 
     def __init__(self):
diff --git a/lib/django-1.5/django/core/management/commands/runserver.py b/lib/django-1.5/django/core/management/commands/runserver.py
index 391e0b4..740764d 100644
--- a/lib/django-1.5/django/core/management/commands/runserver.py
+++ b/lib/django-1.5/django/core/management/commands/runserver.py
@@ -1,12 +1,13 @@
 from optparse import make_option
 from datetime import datetime
+import errno
 import os
 import re
 import sys
 import socket
 
 from django.core.management.base import BaseCommand, CommandError
-from django.core.servers.basehttp import run, WSGIServerException, get_internal_wsgi_application
+from django.core.servers.basehttp import run, get_internal_wsgi_application
 from django.utils import autoreload
 
 naiveip_re = re.compile(r"""^(?:
@@ -112,16 +113,16 @@
             handler = self.get_handler(*args, **options)
             run(self.addr, int(self.port), handler,
                 ipv6=self.use_ipv6, threading=threading)
-        except WSGIServerException as e:
+        except socket.error as e:
             # Use helpful error messages instead of ugly tracebacks.
             ERRORS = {
-                13: "You don't have permission to access that port.",
-                98: "That port is already in use.",
-                99: "That IP address can't be assigned-to.",
+                errno.EACCES: "You don't have permission to access that port.",
+                errno.EADDRINUSE: "That port is already in use.",
+                errno.EADDRNOTAVAIL: "That IP address can't be assigned-to.",
             }
             try:
-                error_text = ERRORS[e.args[0].args[0]]
-            except (AttributeError, KeyError):
+                error_text = ERRORS[e.errno]
+            except KeyError:
                 error_text = str(e)
             self.stderr.write("Error: %s" % error_text)
             # Need to use an OS exit because sys.exit doesn't work in a thread
diff --git a/lib/django-1.5/django/core/servers/basehttp.py b/lib/django-1.5/django/core/servers/basehttp.py
index 68ca0c1..9aed637 100644
--- a/lib/django-1.5/django/core/servers/basehttp.py
+++ b/lib/django-1.5/django/core/servers/basehttp.py
@@ -67,10 +67,6 @@
     return app
 
 
-class WSGIServerException(Exception):
-    pass
-
-
 class ServerHandler(simple_server.ServerHandler, object):
     error_status = str("500 INTERNAL SERVER ERROR")
 
@@ -131,10 +127,7 @@
 
     def server_bind(self):
         """Override server_bind to store the server name."""
-        try:
-            super(WSGIServer, self).server_bind()
-        except Exception as e:
-            raise WSGIServerException(e)
+        super(WSGIServer, self).server_bind()
         self.setup_environ()
 
 
diff --git a/lib/django-1.5/django/core/urlresolvers.py b/lib/django-1.5/django/core/urlresolvers.py
index c657fd9..3e314e2 100644
--- a/lib/django-1.5/django/core/urlresolvers.py
+++ b/lib/django-1.5/django/core/urlresolvers.py
@@ -8,6 +8,7 @@
 """
 from __future__ import unicode_literals
 
+import functools
 import re
 from threading import local
 
@@ -244,6 +245,10 @@
         self._reverse_dict = {}
         self._namespace_dict = {}
         self._app_dict = {}
+        # set of dotted paths to all functions and classes that are used in
+        # urlpatterns
+        self._callback_strs = set()
+        self._populated = False
 
     def __repr__(self):
         if isinstance(self.urlconf_name, list) and len(self.urlconf_name):
@@ -261,6 +266,18 @@
         apps = {}
         language_code = get_language()
         for pattern in reversed(self.url_patterns):
+            if hasattr(pattern, '_callback_str'):
+                self._callback_strs.add(pattern._callback_str)
+            elif hasattr(pattern, '_callback'):
+                callback = pattern._callback
+                if isinstance(callback, functools.partial):
+                    callback = callback.func
+
+                if not hasattr(callback, '__name__'):
+                    lookup_str = callback.__module__ + "." + callback.__class__.__name__
+                else:
+                    lookup_str = callback.__module__ + "." + callback.__name__
+                self._callback_strs.add(lookup_str)
             p_pattern = pattern.regex.pattern
             if p_pattern.startswith('^'):
                 p_pattern = p_pattern[1:]
@@ -281,6 +298,7 @@
                         namespaces[namespace] = (p_pattern + prefix, sub_pattern)
                     for app_name, namespace_list in pattern.app_dict.items():
                         apps.setdefault(app_name, []).extend(namespace_list)
+                    self._callback_strs.update(pattern._callback_strs)
             else:
                 bits = normalize(p_pattern)
                 lookups.appendlist(pattern.callback, (bits, p_pattern, pattern.default_args))
@@ -289,6 +307,7 @@
         self._reverse_dict[language_code] = lookups
         self._namespace_dict[language_code] = namespaces
         self._app_dict[language_code] = apps
+        self._populated = True
 
     @property
     def reverse_dict(self):
@@ -375,8 +394,13 @@
     def _reverse_with_prefix(self, lookup_view, _prefix, *args, **kwargs):
         if args and kwargs:
             raise ValueError("Don't mix *args and **kwargs in call to reverse()!")
+
+        if not self._populated:
+            self._populate()
+
         try:
-            lookup_view = get_callable(lookup_view, True)
+            if lookup_view in self._callback_strs:
+                lookup_view = get_callable(lookup_view, True)
         except (ImportError, AttributeError) as e:
             raise NoReverseMatch("Error importing '%s': %s." % (lookup_view, e))
         possibilities = self.reverse_dict.getlist(lookup_view)
diff --git a/lib/django-1.5/django/db/backends/__init__.py b/lib/django-1.5/django/db/backends/__init__.py
index 1decce0..d33744c 100644
--- a/lib/django-1.5/django/db/backends/__init__.py
+++ b/lib/django-1.5/django/db/backends/__init__.py
@@ -895,7 +895,7 @@
         Coerce the value returned by the database backend into a consistent type
         that is compatible with the field type.
         """
-        if value is None:
+        if value is None or field is None:
             return value
         internal_type = field.get_internal_type()
         if internal_type == 'FloatField':
diff --git a/lib/django-1.5/django/db/models/fields/__init__.py b/lib/django-1.5/django/db/models/fields/__init__.py
index 9949dfa..44d0c20 100644
--- a/lib/django-1.5/django/db/models/fields/__init__.py
+++ b/lib/django-1.5/django/db/models/fields/__init__.py
@@ -934,6 +934,12 @@
         kwargs['max_length'] = kwargs.get('max_length', 100)
         Field.__init__(self, verbose_name, name, **kwargs)
 
+    def get_prep_value(self, value):
+        value = super(FilePathField, self).get_prep_value(value)
+        if value is None:
+            return None
+        return six.text_type(value)
+
     def formfield(self, **kwargs):
         defaults = {
             'path': self.path,
@@ -1035,6 +1041,12 @@
         kwargs['max_length'] = 15
         Field.__init__(self, *args, **kwargs)
 
+    def get_prep_value(self, value):
+        value = super(IPAddressField, self).get_prep_value(value)
+        if value is None:
+            return None
+        return six.text_type(value)
+
     def get_internal_type(self):
         return "IPAddressField"
 
@@ -1072,12 +1084,14 @@
         return value or None
 
     def get_prep_value(self, value):
+        if value is None:
+            return value
         if value and ':' in value:
             try:
                 return clean_ipv6_address(value, self.unpack_ipv4)
             except exceptions.ValidationError:
                 pass
-        return value
+        return six.text_type(value)
 
     def formfield(self, **kwargs):
         defaults = {'form_class': forms.GenericIPAddressField}
diff --git a/lib/django-1.5/django/db/models/sql/compiler.py b/lib/django-1.5/django/db/models/sql/compiler.py
index 7ea4cd7..50034bc 100644
--- a/lib/django-1.5/django/db/models/sql/compiler.py
+++ b/lib/django-1.5/django/db/models/sql/compiler.py
@@ -774,6 +774,10 @@
             transaction.set_dirty(self.using)
         for rows in self.execute_sql(MULTI):
             for row in rows:
+                if has_aggregate_select:
+                    loaded_fields = self.query.get_loaded_field_names().get(self.query.model, set()) or self.query.select
+                    aggregate_start = len(self.query.extra_select) + len(loaded_fields)
+                    aggregate_end = aggregate_start + len(self.query.aggregate_select)
                 if resolve_columns:
                     if fields is None:
                         # We only set this up here because
@@ -797,14 +801,16 @@
                         # into `resolve_columns` because it wasn't selected.
                         only_load = self.deferred_to_columns()
                         if only_load:
-                            db_table = self.query.model._meta.db_table
-                            fields = [f for f in fields if db_table in only_load and
-                                      f.column in only_load[db_table]]
+                            fields = [f for f in fields if f.model._meta.db_table not in only_load or
+                                      f.column in only_load[f.model._meta.db_table]]
+                        if has_aggregate_select:
+                            # pad None in to fields for aggregates
+                            fields = fields[:aggregate_start] + [
+                                None for x in range(0, aggregate_end - aggregate_start)
+                            ] + fields[aggregate_start:]
                     row = self.resolve_columns(row, fields)
 
                 if has_aggregate_select:
-                    aggregate_start = len(self.query.extra_select) + len(self.query.select)
-                    aggregate_end = aggregate_start + len(self.query.aggregate_select)
                     row = tuple(row[:aggregate_start]) + tuple([
                         self.query.resolve_aggregate(value, aggregate, self.connection)
                         for (alias, aggregate), value
diff --git a/lib/django-1.5/django/http/__init__.py b/lib/django-1.5/django/http/__init__.py
index 46afa34..08d9d23 100644
--- a/lib/django-1.5/django/http/__init__.py
+++ b/lib/django-1.5/django/http/__init__.py
@@ -6,5 +6,4 @@
     HttpResponseRedirect, HttpResponseNotModified, HttpResponseBadRequest,
     HttpResponseForbidden, HttpResponseNotFound, HttpResponseNotAllowed,
     HttpResponseGone, HttpResponseServerError, Http404, BadHeaderError)
-from django.http.utils import (fix_location_header, conditional_content_removal,
-    fix_IE_for_attach, fix_IE_for_vary)
+from django.http.utils import fix_location_header, conditional_content_removal
diff --git a/lib/django-1.5/django/http/utils.py b/lib/django-1.5/django/http/utils.py
index fcb3fec..ace35eb 100644
--- a/lib/django-1.5/django/http/utils.py
+++ b/lib/django-1.5/django/http/utils.py
@@ -39,58 +39,3 @@
         else:
             response.content = ''
     return response
-
-
-def fix_IE_for_attach(request, response):
-    """
-    This function will prevent Django from serving a Content-Disposition header
-    while expecting the browser to cache it (only when the browser is IE). This
-    leads to IE not allowing the client to download.
-    """
-    useragent = request.META.get('HTTP_USER_AGENT', '').upper()
-    if 'MSIE' not in useragent and 'CHROMEFRAME' not in useragent:
-        return response
-
-    offending_headers = ('no-cache', 'no-store')
-    if response.has_header('Content-Disposition'):
-        try:
-            del response['Pragma']
-        except KeyError:
-            pass
-        if response.has_header('Cache-Control'):
-            cache_control_values = [value.strip() for value in
-                    response['Cache-Control'].split(',')
-                    if value.strip().lower() not in offending_headers]
-
-            if not len(cache_control_values):
-                del response['Cache-Control']
-            else:
-                response['Cache-Control'] = ', '.join(cache_control_values)
-
-    return response
-
-
-def fix_IE_for_vary(request, response):
-    """
-    This function will fix the bug reported at
-    http://support.microsoft.com/kb/824847/en-us?spid=8722&sid=global
-    by clearing the Vary header whenever the mime-type is not safe
-    enough for Internet Explorer to handle.  Poor thing.
-    """
-    useragent = request.META.get('HTTP_USER_AGENT', '').upper()
-    if 'MSIE' not in useragent and 'CHROMEFRAME' not in useragent:
-        return response
-
-    # These mime-types that are decreed "Vary-safe" for IE:
-    safe_mime_types = ('text/html', 'text/plain', 'text/sgml')
-
-    # The first part of the Content-Type field will be the MIME type,
-    # everything after ';', such as character-set, can be ignored.
-    mime_type = response.get('Content-Type', '').partition(';')[0]
-    if mime_type not in safe_mime_types:
-        try:
-            del response['Vary']
-        except KeyError:
-            pass
-
-    return response
diff --git a/lib/django-1.5/django/middleware/cache.py b/lib/django-1.5/django/middleware/cache.py
index 94c18ea..611d9f3 100644
--- a/lib/django-1.5/django/middleware/cache.py
+++ b/lib/django-1.5/django/middleware/cache.py
@@ -50,7 +50,8 @@
 
 from django.conf import settings
 from django.core.cache import get_cache, DEFAULT_CACHE_ALIAS
-from django.utils.cache import get_cache_key, learn_cache_key, patch_response_headers, get_max_age
+from django.utils.cache import (get_cache_key, get_max_age, has_vary_header,
+    learn_cache_key, patch_response_headers)
 
 
 class UpdateCacheMiddleware(object):
@@ -93,8 +94,15 @@
         if not self._should_update_cache(request, response):
             # We don't need to update the cache, just return.
             return response
+
         if response.streaming or response.status_code != 200:
             return response
+
+        # Don't cache responses that set a user-specific (and maybe security
+        # sensitive) cookie in response to a cookie-less request.
+        if not request.COOKIES and response.cookies and has_vary_header(response, 'Cookie'):
+            return response
+
         # Try to get the timeout from the "max-age" section of the "Cache-
         # Control" header before reverting to using the default cache_timeout
         # length.
diff --git a/lib/django-1.5/django/middleware/csrf.py b/lib/django-1.5/django/middleware/csrf.py
index c7c25bf..02d92dd 100644
--- a/lib/django-1.5/django/middleware/csrf.py
+++ b/lib/django-1.5/django/middleware/csrf.py
@@ -58,7 +58,10 @@
     Changes the CSRF token in use for a request - should be done on login
     for security purposes.
     """
-    request.META["CSRF_COOKIE"] = _get_new_csrf_key()
+    request.META.update({
+        "CSRF_COOKIE_USED": True,
+        "CSRF_COOKIE": _get_new_csrf_key(),
+    })
 
 
 def _sanitize_token(token):
diff --git a/lib/django-1.5/django/test/testcases.py b/lib/django-1.5/django/test/testcases.py
index e556d18..99f9794 100644
--- a/lib/django-1.5/django/test/testcases.py
+++ b/lib/django-1.5/django/test/testcases.py
@@ -4,6 +4,7 @@
 import json
 import os
 import re
+import socket
 import sys
 from copy import copy
 from functools import wraps
@@ -24,8 +25,7 @@
 from django.core.management import call_command
 from django.core.management.color import no_style
 from django.core.signals import request_started
-from django.core.servers.basehttp import (WSGIRequestHandler, WSGIServer,
-    WSGIServerException)
+from django.core.servers.basehttp import WSGIRequestHandler, WSGIServer
 from django.core.urlresolvers import clear_url_caches
 from django.core.validators import EMPTY_VALUES
 from django.db import (transaction, connection, connections, DEFAULT_DB_ALIAS,
@@ -1064,10 +1064,9 @@
                 try:
                     self.httpd = StoppableWSGIServer(
                         (self.host, port), QuietWSGIRequestHandler)
-                except WSGIServerException as e:
+                except socket.error as e:
                     if (index + 1 < len(self.possible_ports) and
-                        hasattr(e.args[0], 'errno') and
-                        e.args[0].errno == errno.EADDRINUSE):
+                        e.errno == errno.EADDRINUSE):
                         # This port is already in use, so we go on and try with
                         # the next one in the list.
                         continue
@@ -1157,12 +1156,15 @@
         # Wait for the live server to be ready
         cls.server_thread.is_ready.wait()
         if cls.server_thread.error:
+            # Clean up behind ourselves, since tearDownClass won't get called in
+            # case of errors.
+            cls._tearDownClassInternal()
             raise cls.server_thread.error
 
         super(LiveServerTestCase, cls).setUpClass()
 
     @classmethod
-    def tearDownClass(cls):
+    def _tearDownClassInternal(cls):
         # There may not be a 'server_thread' attribute if setUpClass() for some
         # reasons has raised an exception.
         if hasattr(cls, 'server_thread'):
@@ -1175,4 +1177,7 @@
                 and conn.settings_dict['NAME'] == ':memory:'):
                 conn.allow_thread_sharing = False
 
+    @classmethod
+    def tearDownClass(cls):
+        cls._tearDownClassInternal()
         super(LiveServerTestCase, cls).tearDownClass()
diff --git a/lib/django-1.5/django/utils/cache.py b/lib/django-1.5/django/utils/cache.py
index ecf0f83..343dc6b 100644
--- a/lib/django-1.5/django/utils/cache.py
+++ b/lib/django-1.5/django/utils/cache.py
@@ -168,7 +168,7 @@
         cache_key += '.%s' % getattr(request, 'LANGUAGE_CODE', get_language())
     if settings.USE_TZ:
         # The datetime module doesn't restrict the output of tzname().
-        # Windows is known to use non-standard, locale-dependant names.
+        # Windows is known to use non-standard, locale-dependent names.
         # User-defined tzinfo classes may return absolutely anything.
         # Hence this paranoid conversion to create a valid cache key.
         tz_name = force_text(get_current_timezone_name(), errors='ignore')
diff --git a/lib/django-1.5/django/utils/crypto.py b/lib/django-1.5/django/utils/crypto.py
index 94f717b..878287a 100644
--- a/lib/django-1.5/django/utils/crypto.py
+++ b/lib/django-1.5/django/utils/crypto.py
@@ -115,23 +115,6 @@
     return binascii.unhexlify((hex_format_string % x).encode('ascii'))
 
 
-def _fast_hmac(key, msg, digest):
-    """
-    A trimmed down version of Python's HMAC implementation.
-
-    This function operates on bytes.
-    """
-    dig1, dig2 = digest(), digest()
-    if len(key) > dig1.block_size:
-        key = digest(key).digest()
-    key += b'\x00' * (dig1.block_size - len(key))
-    dig1.update(key.translate(_trans_36))
-    dig1.update(msg)
-    dig2.update(key.translate(_trans_5c))
-    dig2.update(dig1.digest())
-    return dig2
-
-
 def pbkdf2(password, salt, iterations, dklen=0, digest=None):
     """
     Implements PBKDF2 as defined in RFC 2898, section 5.2
@@ -159,11 +142,21 @@
 
     hex_format_string = "%%0%ix" % (hlen * 2)
 
+    inner, outer = digest(), digest()
+    if len(password) > inner.block_size:
+        password = digest(password).digest()
+    password += b'\x00' * (inner.block_size - len(password))
+    inner.update(password.translate(hmac.trans_36))
+    outer.update(password.translate(hmac.trans_5C))
+
     def F(i):
         def U():
             u = salt + struct.pack(b'>I', i)
             for j in xrange(int(iterations)):
-                u = _fast_hmac(password, u, digest).digest()
+                dig1, dig2 = inner.copy(), outer.copy()
+                dig1.update(u)
+                dig2.update(dig1.digest())
+                u = dig2.digest()
                 yield _bin_to_long(u)
         return _long_to_bin(reduce(operator.xor, U()), hex_format_string)
 
diff --git a/lib/django-1.5/django/utils/datetime_safe.py b/lib/django-1.5/django/utils/datetime_safe.py
index b634888..ca96fb3 100644
--- a/lib/django-1.5/django/utils/datetime_safe.py
+++ b/lib/django-1.5/django/utils/datetime_safe.py
@@ -19,8 +19,11 @@
     def strftime(self, fmt):
         return strftime(self, fmt)
 
-    def combine(self, date, time):
-        return datetime(date.year, date.month, date.day, time.hour, time.minute, time.microsecond, time.tzinfo)
+    @classmethod
+    def combine(cls, date, time):
+        return cls(date.year, date.month, date.day,
+                   time.hour, time.minute, time.second,
+                   time.microsecond, time.tzinfo)
 
     def date(self):
         return date(self.year, self.month, self.day)
diff --git a/lib/django-1.5/django/utils/feedgenerator.py b/lib/django-1.5/django/utils/feedgenerator.py
index f9126a6..d07fd66 100644
--- a/lib/django-1.5/django/utils/feedgenerator.py
+++ b/lib/django-1.5/django/utils/feedgenerator.py
@@ -36,7 +36,7 @@
 from django.utils.timezone import is_aware
 
 def rfc2822_date(date):
-    # We can't use strftime() because it produces locale-dependant results, so
+    # We can't use strftime() because it produces locale-dependent results, so
     # we have to map english month and day names manually
     months = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec',)
     days = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')
diff --git a/lib/django-1.5/django/utils/http.py b/lib/django-1.5/django/utils/http.py
index f376b1c..67912f7 100644
--- a/lib/django-1.5/django/utils/http.py
+++ b/lib/django-1.5/django/utils/http.py
@@ -237,6 +237,18 @@
     """
     if not url:
         return False
+    # Chrome treats \ completely as /
+    url = url.replace('\\', '/')
+    # Chrome considers any URL with more than two slashes to be absolute, but
+    # urlparse is not so flexible. Treat any url with three slashes as unsafe.
+    if url.startswith('///'):
+        return False
     url_info = urllib_parse.urlparse(url)
+    # Forbid URLs like http:///example.com - with a scheme, but without a hostname.
+    # In that URL, example.com is not the hostname but a path component. However,
+    # Chrome will still consider example.com to be the hostname, so we must not
+    # allow this syntax.
+    if not url_info.netloc and url_info.scheme:
+        return False
     return (not url_info.netloc or url_info.netloc == host) and \
         (not url_info.scheme or url_info.scheme in ['http', 'https'])
diff --git a/lib/django-1.5/django/utils/six.py b/lib/django-1.5/django/utils/six.py
index 06d9b4a..26370d7 100644
--- a/lib/django-1.5/django/utils/six.py
+++ b/lib/django-1.5/django/utils/six.py
@@ -1,33 +1,35 @@
 """Utilities for writing code that runs on Python 2 and 3"""
 
-# Copyright (c) 2010-2013 Benjamin Peterson
+# Copyright (c) 2010-2014 Benjamin Peterson
 #
-# Permission is hereby granted, free of charge, to any person obtaining a copy of
-# this software and associated documentation files (the "Software"), to deal in
-# the Software without restriction, including without limitation the rights to
-# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-# the Software, and to permit persons to whom the Software is furnished to do so,
-# subject to the following conditions:
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
 #
 # The above copyright notice and this permission notice shall be included in all
 # copies or substantial portions of the Software.
 #
 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
 
 import operator
 import sys
 import types
 
 __author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.2.0"
+__version__ = "1.6.1"
 
 
-# True if we are running on Python 3.
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
 PY3 = sys.version_info[0] == 3
 
 if PY3:
@@ -61,7 +63,7 @@
         else:
             # 64-bit
             MAXSIZE = int((1 << 63) - 1)
-            del X
+        del X
 
 
 def _add_doc(func, doc):
@@ -81,10 +83,14 @@
         self.name = name
 
     def __get__(self, obj, tp):
-        result = self._resolve()
-        setattr(obj, self.name, result)
+        try:
+            result = self._resolve()
+        except ImportError:
+            # See the nice big comment in MovedModule.__getattr__.
+            raise AttributeError("%s could not be imported " % self.name)
+        setattr(obj, self.name, result) # Invokes __set__.
         # This is a bit ugly, but it avoids running this again.
-        delattr(tp, self.name)
+        delattr(obj.__class__, self.name)
         return result
 
 
@@ -102,6 +108,42 @@
     def _resolve(self):
         return _import_module(self.mod)
 
+    def __getattr__(self, attr):
+        # It turns out many Python frameworks like to traverse sys.modules and
+        # try to load various attributes. This causes problems if this is a
+        # platform-specific module on the wrong platform, like _winreg on
+        # Unixes. Therefore, we silently pretend unimportable modules do not
+        # have any attributes. See issues #51, #53, #56, and #63 for the full
+        # tales of woe.
+        #
+        # First, if possible, avoid loading the module just to look at __file__,
+        # __name__, or __path__.
+        if (attr in ("__file__", "__name__", "__path__") and
+            self.mod not in sys.modules):
+            raise AttributeError(attr)
+        try:
+            _module = self._resolve()
+        except ImportError:
+            raise AttributeError(attr)
+        value = getattr(_module, attr)
+        setattr(self, attr, value)
+        return value
+
+
+class _LazyModule(types.ModuleType):
+
+    def __init__(self, name):
+        super(_LazyModule, self).__init__(name)
+        self.__doc__ = self.__class__.__doc__
+
+    def __dir__(self):
+        attrs = ["__doc__", "__name__"]
+        attrs += [attr.name for attr in self._moved_attributes]
+        return attrs
+
+    # Subclasses should override this
+    _moved_attributes = []
+
 
 class MovedAttribute(_LazyDescr):
 
@@ -129,24 +171,29 @@
 
 
 
-class _MovedItems(types.ModuleType):
+class _MovedItems(_LazyModule):
     """Lazy loading of moved objects"""
 
 
 _moved_attributes = [
     MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
     MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
     MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
     MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
     MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
     MovedAttribute("reduce", "__builtin__", "functools"),
     MovedAttribute("StringIO", "StringIO", "io"),
+    MovedAttribute("UserString", "UserString", "collections"),
     MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
     MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
 
     MovedModule("builtins", "__builtin__"),
     MovedModule("configparser", "ConfigParser"),
     MovedModule("copyreg", "copy_reg"),
+    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
     MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
     MovedModule("http_cookies", "Cookie", "http.cookies"),
     MovedModule("html_entities", "htmlentitydefs", "html.entities"),
@@ -162,12 +209,14 @@
     MovedModule("queue", "Queue"),
     MovedModule("reprlib", "repr"),
     MovedModule("socketserver", "SocketServer"),
+    MovedModule("_thread", "thread", "_thread"),
     MovedModule("tkinter", "Tkinter"),
     MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
     MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
     MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
     MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
     MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
     MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
     MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
     MovedModule("tkinter_colorchooser", "tkColorChooser",
@@ -179,14 +228,170 @@
     MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
     MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                 "tkinter.simpledialog"),
+    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
     MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+    MovedModule("xmlrpc_server", "xmlrpclib", "xmlrpc.server"),
     MovedModule("winreg", "_winreg"),
 ]
 for attr in _moved_attributes:
     setattr(_MovedItems, attr.name, attr)
+    if isinstance(attr, MovedModule):
+        sys.modules[__name__ + ".moves." + attr.name] = attr
 del attr
 
-moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+    MovedAttribute("quote", "urllib", "urllib.parse"),
+    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+    MovedAttribute("unquote", "urllib", "urllib.parse"),
+    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+    MovedAttribute("urlencode", "urllib", "urllib.parse"),
+    MovedAttribute("splitquery", "urllib", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+    setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_parse"] = sys.modules[__name__ + ".moves.urllib.parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+    MovedAttribute("URLError", "urllib2", "urllib.error"),
+    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+    setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_error"] = sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+    MovedAttribute("urlopen", "urllib2", "urllib.request"),
+    MovedAttribute("install_opener", "urllib2", "urllib.request"),
+    MovedAttribute("build_opener", "urllib2", "urllib.request"),
+    MovedAttribute("pathname2url", "urllib", "urllib.request"),
+    MovedAttribute("url2pathname", "urllib", "urllib.request"),
+    MovedAttribute("getproxies", "urllib", "urllib.request"),
+    MovedAttribute("Request", "urllib2", "urllib.request"),
+    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+    MovedAttribute("URLopener", "urllib", "urllib.request"),
+    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+    setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_request"] = sys.modules[__name__ + ".moves.urllib.request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+    MovedAttribute("addbase", "urllib", "urllib.response"),
+    MovedAttribute("addclosehook", "urllib", "urllib.response"),
+    MovedAttribute("addinfo", "urllib", "urllib.response"),
+    MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+    setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_response"] = sys.modules[__name__ + ".moves.urllib.response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+    """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_robotparser"] = sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+    parse = sys.modules[__name__ + ".moves.urllib_parse"]
+    error = sys.modules[__name__ + ".moves.urllib_error"]
+    request = sys.modules[__name__ + ".moves.urllib_request"]
+    response = sys.modules[__name__ + ".moves.urllib_response"]
+    robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"]
+
+    def __dir__(self):
+        return ['parse', 'error', 'request', 'response', 'robotparser']
+
+
+sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(__name__ + ".moves.urllib")
 
 
 def add_move(move):
@@ -252,11 +457,16 @@
     def get_unbound_function(unbound):
         return unbound
 
+    create_bound_method = types.MethodType
+
     Iterator = object
 else:
     def get_unbound_function(unbound):
         return unbound.im_func
 
+    def create_bound_method(func, obj):
+        return types.MethodType(func, obj, obj.__class__)
+
     class Iterator(object):
 
         def next(self):
@@ -297,21 +507,33 @@
         return s.encode("latin-1")
     def u(s):
         return s
+    unichr = chr
     if sys.version_info[1] <= 1:
         def int2byte(i):
             return bytes((i,))
     else:
         # This is about 2x faster than the implementation above on 3.2+
         int2byte = operator.methodcaller("to_bytes", 1, "big")
+    byte2int = operator.itemgetter(0)
+    indexbytes = operator.getitem
+    iterbytes = iter
     import io
     StringIO = io.StringIO
     BytesIO = io.BytesIO
 else:
     def b(s):
         return s
+    # Workaround for standalone backslash
     def u(s):
-        return unicode(s, "unicode_escape")
+        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+    unichr = unichr
     int2byte = chr
+    def byte2int(bs):
+        return ord(bs[0])
+    def indexbytes(buf, i):
+        return ord(buf[i])
+    def iterbytes(buf):
+        return (ord(byte) for byte in buf)
     import StringIO
     StringIO = BytesIO = StringIO.StringIO
 _add_doc(b, """Byte literal""")
@@ -319,8 +541,7 @@
 
 
 if PY3:
-    import builtins
-    exec_ = getattr(builtins, "exec")
+    exec_ = getattr(moves.builtins, "exec")
 
 
     def reraise(tp, value, tb=None):
@@ -328,10 +549,6 @@
             raise value.with_traceback(tb)
         raise value
 
-
-    print_ = getattr(builtins, "print")
-    del builtins
-
 else:
     def exec_(_code_, _globs_=None, _locs_=None):
         """Execute code in a namespace."""
@@ -351,14 +568,24 @@
 """)
 
 
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
     def print_(*args, **kwargs):
-        """The new-style print function."""
+        """The new-style print function for Python 2.4 and 2.5."""
         fp = kwargs.pop("file", sys.stdout)
         if fp is None:
             return
         def write(data):
             if not isinstance(data, basestring):
                 data = str(data)
+            # If the file has an encoding, encode unicode with it.
+            if (isinstance(fp, file) and
+                isinstance(data, unicode) and
+                fp.encoding is not None):
+                errors = getattr(fp, "errors", None)
+                if errors is None:
+                    errors = "strict"
+                data = data.encode(fp.encoding, errors)
             fp.write(data)
         want_unicode = False
         sep = kwargs.pop("sep", None)
@@ -399,22 +626,51 @@
 _add_doc(reraise, """Reraise an exception.""")
 
 
-def with_metaclass(meta, base=object):
+def with_metaclass(meta, *bases):
     """Create a base class with a metaclass."""
-    return meta("NewBase", (base,), {})
+    return meta("NewBase", bases, {})
+
+def add_metaclass(metaclass):
+    """Class decorator for creating a class with a metaclass."""
+    def wrapper(cls):
+        orig_vars = cls.__dict__.copy()
+        orig_vars.pop('__dict__', None)
+        orig_vars.pop('__weakref__', None)
+        slots = orig_vars.get('__slots__')
+        if slots is not None:
+            if isinstance(slots, str):
+                slots = [slots]
+            for slots_var in slots:
+                orig_vars.pop(slots_var)
+        return metaclass(cls.__name__, cls.__bases__, orig_vars)
+    return wrapper
 
 
 ### Additional customizations for Django ###
 
 if PY3:
     _assertRaisesRegex = "assertRaisesRegex"
+    _assertRegex = "assertRegex"
+    memoryview = memoryview
 else:
     _assertRaisesRegex = "assertRaisesRegexp"
+    _assertRegex = "assertRegexpMatches"
+    # memoryview and buffer are not strictly equivalent, but should be fine for
+    # django core usage (mainly BinaryField). However, Jython doesn't support
+    # buffer (see http://bugs.jython.org/issue1521), so we have to be careful.
+    if sys.platform.startswith('java'):
+        memoryview = memoryview
+    else:
+        memoryview = buffer
 
 
 def assertRaisesRegex(self, *args, **kwargs):
     return getattr(self, _assertRaisesRegex)(*args, **kwargs)
 
 
+def assertRegex(self, *args, **kwargs):
+    return getattr(self, _assertRegex)(*args, **kwargs)
+
+
 add_move(MovedModule("_dummy_thread", "dummy_thread"))
 add_move(MovedModule("_thread", "thread"))
diff --git a/lib/django-1.5/django/utils/text.py b/lib/django-1.5/django/utils/text.py
index d75ca8d..03cfd55 100644
--- a/lib/django-1.5/django/utils/text.py
+++ b/lib/django-1.5/django/utils/text.py
@@ -381,12 +381,12 @@
                 c = int(text[1:], 16)
             else:
                 c = int(text)
-            return unichr(c)
+            return six.unichr(c)
         except ValueError:
             return match.group(0)
     else:
         try:
-            return unichr(html_entities.name2codepoint[text])
+            return six.unichr(html_entities.name2codepoint[text])
         except (ValueError, KeyError):
             return match.group(0)
 
diff --git a/lib/django-1.5/docs/_ext/djangodocs.py b/lib/django-1.5/docs/_ext/djangodocs.py
index 29ff505..3605270 100644
--- a/lib/django-1.5/docs/_ext/djangodocs.py
+++ b/lib/django-1.5/docs/_ext/djangodocs.py
@@ -99,11 +99,13 @@
         self.compact_p = self.context.pop()
         self.body.append('</table>\n')
 
-    # <big>? Really?
     def visit_desc_parameterlist(self, node):
-        self.body.append('(')
+        self.body.append('(')  # by default sphinx puts <big> around the "("
         self.first_param = 1
+        self.optional_param_level = 0
         self.param_separator = node.child_text_separator
+        self.required_params_left = sum([isinstance(c, addnodes.desc_parameter)
+                                         for c in node.children])
 
     def depart_desc_parameterlist(self, node):
         self.body.append(')')
diff --git a/lib/django-1.5/docs/_theme/djangodocs/layout.html b/lib/django-1.5/docs/_theme/djangodocs/layout.html
index ef91dd7..caf990c 100644
--- a/lib/django-1.5/docs/_theme/djangodocs/layout.html
+++ b/lib/django-1.5/docs/_theme/djangodocs/layout.html
@@ -17,6 +17,9 @@
 {%- endmacro %}
 
 {% block extrahead %}
+{# When building htmlhelp (CHM format)  disable JQuery inclusion,  #}
+{# as it causes problems in compiled CHM files.                    #}
+{% if builder != "htmlhelp" %}
 {{ super() }}
 <script type="text/javascript" src="{{ pathto('templatebuiltins.js', 1) }}"></script>
 <script type="text/javascript">
@@ -51,6 +54,7 @@
     });
 })(jQuery);
 </script>
+{% endif %}
 {% endblock %}
 
 {% block document %}
diff --git a/lib/django-1.5/docs/_theme/djangodocs/static/djangodocs.css b/lib/django-1.5/docs/_theme/djangodocs/static/djangodocs.css
index bab81cd..9467d09 100644
--- a/lib/django-1.5/docs/_theme/djangodocs/static/djangodocs.css
+++ b/lib/django-1.5/docs/_theme/djangodocs/static/djangodocs.css
@@ -64,6 +64,8 @@
 ol { padding-left:30px; }
 ol.arabic li { list-style-type: decimal; }
 ul li { list-style-type:square; margin-bottom:.4em; }
+ul ul li { list-style-type:disc; }
+ul ul ul li { list-style-type:circle; }
 ol li { margin-bottom: .4em; }
 ul ul { padding-left:1.2em; }
 ul ul ul { padding-left:1em; }
diff --git a/lib/django-1.5/docs/conf.py b/lib/django-1.5/docs/conf.py
index 9c5a29e..5a030a1 100644
--- a/lib/django-1.5/docs/conf.py
+++ b/lib/django-1.5/docs/conf.py
@@ -52,9 +52,9 @@
 # built documents.
 #
 # The short X.Y version.
-version = '1.5.4'
+version = '1.5.8'
 # The full version, including alpha/beta/rc tags.
-release = '1.5.4'
+release = version
 # The next version to be released
 django_next_version = '1.6'
 
diff --git a/lib/django-1.5/docs/howto/custom-model-fields.txt b/lib/django-1.5/docs/howto/custom-model-fields.txt
index e87d4f7..5dee95e 100644
--- a/lib/django-1.5/docs/howto/custom-model-fields.txt
+++ b/lib/django-1.5/docs/howto/custom-model-fields.txt
@@ -193,7 +193,7 @@
     you want your fields to be more strict about the options they select, or to
     use the simpler, more permissive behavior of the current fields.
 
-.. method:: Field.__init__
+.. method:: Field.__init__()
 
 The :meth:`~django.db.models.Field.__init__` method takes the following
 parameters:
@@ -332,7 +332,7 @@
 Custom database types
 ~~~~~~~~~~~~~~~~~~~~~
 
-.. method:: Field.db_type(self, connection)
+.. method:: Field.db_type(connection)
 
 Returns the database column data type for the :class:`~django.db.models.Field`,
 taking into account the connection object, and the settings associated with it.
@@ -418,7 +418,7 @@
 Converting database values to Python objects
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. method:: Field.to_python(self, value)
+.. method:: Field.to_python(value)
 
 Converts a value as returned by your database (or a serializer) to a Python
 object.
@@ -479,7 +479,7 @@
 Converting Python objects to query values
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. method:: Field.get_prep_value(self, value)
+.. method:: Field.get_prep_value(value)
 
 This is the reverse of :meth:`.to_python` when working with the
 database backends (as opposed to serialization). The ``value``
@@ -501,10 +501,20 @@
             return ''.join([''.join(l) for l in (value.north,
                     value.east, value.south, value.west)])
 
+.. warning::
+
+    If your custom field uses the ``CHAR``, ``VARCHAR`` or ``TEXT``
+    types for MySQL, you must make sure that :meth:`.get_prep_value`
+    always returns a string type. MySQL performs flexible and unexpected
+    matching when a query is performed on these types and the provided
+    value is an integer, which can cause queries to include unexpected
+    objects in their results. This problem cannot occur if you always
+    return a string type from :meth:`.get_prep_value`.
+
 Converting query values to database values
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. method:: Field.get_db_prep_value(self, value, connection, prepared=False)
+.. method:: Field.get_db_prep_value(value, connection, prepared=False)
 
 Some data types (for example, dates) need to be in a specific format
 before they can be used by a database backend.
@@ -520,7 +530,7 @@
 initial data conversions before performing any database-specific
 processing.
 
-.. method:: Field.get_db_prep_save(self, value, connection)
+.. method:: Field.get_db_prep_save(value, connection)
 
 Same as the above, but called when the Field value must be *saved* to
 the database. As the default implementation just calls
@@ -532,7 +542,7 @@
 Preprocessing values before saving
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. method:: Field.pre_save(self, model_instance, add)
+.. method:: Field.pre_save(model_instance, add)
 
 This method is called just prior to :meth:`.get_db_prep_save` and should return
 the value of the appropriate attribute from ``model_instance`` for this field.
@@ -558,7 +568,7 @@
 As with value conversions, preparing a value for database lookups is a
 two phase process.
 
-.. method:: Field.get_prep_lookup(self, lookup_type, value)
+.. method:: Field.get_prep_lookup(lookup_type, value)
 
 :meth:`.get_prep_lookup` performs the first phase of lookup preparation,
 performing generic data validity checks
@@ -606,7 +616,7 @@
             else:
                 raise TypeError('Lookup type %r not supported.' % lookup_type)
 
-.. method:: Field.get_db_prep_lookup(self, lookup_type, value, connection, prepared=False)
+.. method:: Field.get_db_prep_lookup(lookup_type, value, connection, prepared=False)
 
 Performs any database-specific data conversions required by a lookup.
 As with :meth:`.get_db_prep_value`, the specific connection that will
@@ -617,7 +627,7 @@
 Specifying the form field for a model field
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. method:: Field.formfield(self, form_class=forms.CharField, **kwargs)
+.. method:: Field.formfield(form_class=forms.CharField, **kwargs)
 
 Returns the default form field to use when this field is displayed in a model.
 This method is called by the :class:`~django.forms.ModelForm` helper.
@@ -652,7 +662,7 @@
 Emulating built-in field types
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. method:: Field.get_internal_type(self)
+.. method:: Field.get_internal_type()
 
 Returns a string giving the name of the :class:`~django.db.models.Field`
 subclass we are emulating at the database level. This is used to determine the
@@ -686,7 +696,7 @@
 Converting field data for serialization
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. method:: Field.value_to_string(self, obj)
+.. method:: Field.value_to_string(obj)
 
 This method is used by the serializers to convert the field into a string for
 output. Calling ``Field._get_val_from_obj(obj)`` is the best way to get the
diff --git a/lib/django-1.5/docs/howto/custom-template-tags.txt b/lib/django-1.5/docs/howto/custom-template-tags.txt
index 30bb6b1..a225bf6 100644
--- a/lib/django-1.5/docs/howto/custom-template-tags.txt
+++ b/lib/django-1.5/docs/howto/custom-template-tags.txt
@@ -117,7 +117,7 @@
 Registering custom filters
 ~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. method:: django.template.Library.filter
+.. method:: django.template.Library.filter()
 
 Once you've written your filter definition, you need to register it with
 your ``Library`` instance, to make it available to Django's template language:
@@ -156,7 +156,7 @@
 Template filters that expect strings
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. method:: django.template.defaultfilters.stringfilter
+.. method:: django.template.defaultfilters.stringfilter()
 
 If you're writing a template filter that only expects a string as the first
 argument, you should use the decorator ``stringfilter``. This will
@@ -495,8 +495,12 @@
 
 * The ``render()`` method is where the work actually happens.
 
-* ``render()`` should never raise ``TemplateSyntaxError`` or any other
-  exception. It should fail silently, just as template filters should.
+* ``render()`` should generally fail silently, particularly in a production
+  environment where :setting:`DEBUG` and :setting:`TEMPLATE_DEBUG` are
+  ``False``. In some cases however, particularly if :setting:`TEMPLATE_DEBUG` is
+  ``True``, this method may raise an exception to make debugging easier. For
+  example, several core tags raise ``django.template.TemplateSyntaxError``
+  if they receive the wrong number or type of arguments.
 
 Ultimately, this decoupling of compilation and rendering results in an
 efficient template system, because a template can render multiple contexts
@@ -729,7 +733,7 @@
 Simple tags
 ~~~~~~~~~~~
 
-.. method:: django.template.Library.simple_tag
+.. method:: django.template.Library.simple_tag()
 
 Many template tags take a number of arguments -- strings or template variables
 -- and return a string after doing some processing based solely on
diff --git a/lib/django-1.5/docs/howto/deployment/wsgi/apache-auth.txt b/lib/django-1.5/docs/howto/deployment/wsgi/apache-auth.txt
index 2206459..fbde30f 100644
--- a/lib/django-1.5/docs/howto/deployment/wsgi/apache-auth.txt
+++ b/lib/django-1.5/docs/howto/deployment/wsgi/apache-auth.txt
@@ -38,6 +38,7 @@
 .. code-block:: apache
 
     WSGIScriptAlias / /path/to/mysite.com/mysite/wsgi.py
+    WSGIPythonPath /path/to/mysite.com
 
     WSGIProcessGroup %{GLOBAL}
     WSGIApplicationGroup django
@@ -76,7 +77,6 @@
 .. code-block:: python
 
     import os
-    import sys
 
     os.environ['DJANGO_SETTINGS_MODULE'] = 'mysite.settings'
 
diff --git a/lib/django-1.5/docs/howto/deployment/wsgi/modwsgi.txt b/lib/django-1.5/docs/howto/deployment/wsgi/modwsgi.txt
index 99e1733..5234d33 100644
--- a/lib/django-1.5/docs/howto/deployment/wsgi/modwsgi.txt
+++ b/lib/django-1.5/docs/howto/deployment/wsgi/modwsgi.txt
@@ -18,15 +18,16 @@
 the details about how to use mod_wsgi. You'll probably want to start with the
 `installation and configuration documentation`_.
 
-.. _official mod_wsgi documentation: http://www.modwsgi.org/
-.. _installation and configuration documentation: http://www.modwsgi.org/wiki/InstallationInstructions
+.. _official mod_wsgi documentation: http://code.google.com/p/modwsgi/
+.. _installation and configuration documentation: http://code.google.com/p/modwsgi/wiki/InstallationInstructions
 
 Basic configuration
 ===================
 
 Once you've got mod_wsgi installed and activated, edit your Apache server's
 ``httpd.conf`` file and add the following. If you are using a version of Apache
-older than 2.4, replace ``Require all granted`` with ``Allow from all``.
+older than 2.4, replace ``Require all granted`` with ``Allow from all`` and
+also add the line ``Order deny,allow`` above it.
 
 .. code-block:: apache
 
@@ -35,7 +36,6 @@
 
     <Directory /path/to/mysite.com/mysite>
     <Files wsgi.py>
-    Order deny,allow
     Require all granted
     </Files>
     </Directory>
@@ -139,24 +139,25 @@
     Alias /static/ /path/to/mysite.com/static/
 
     <Directory /path/to/mysite.com/static>
-    Order deny,allow
-    Allow from all
+    Require all granted
     </Directory>
 
     <Directory /path/to/mysite.com/media>
-    Order deny,allow
-    Allow from all
+    Require all granted
     </Directory>
 
     WSGIScriptAlias / /path/to/mysite.com/mysite/wsgi.py
 
     <Directory /path/to/mysite.com/mysite>
     <Files wsgi.py>
-    Order allow,deny
-    Allow from all
+    Require all granted
     </Files>
     </Directory>
 
+If you are using a version of Apache older than 2.4, replace
+``Require all granted`` with ``Allow from all`` and also add the line
+``Order deny,allow`` above it.
+
 .. _lighttpd: http://www.lighttpd.net/
 .. _Nginx: http://wiki.nginx.org/Main
 .. _TUX: http://en.wikipedia.org/wiki/TUX_web_server
diff --git a/lib/django-1.5/docs/howto/error-reporting.txt b/lib/django-1.5/docs/howto/error-reporting.txt
index 6239972..0837e99 100644
--- a/lib/django-1.5/docs/howto/error-reporting.txt
+++ b/lib/django-1.5/docs/howto/error-reporting.txt
@@ -119,6 +119,8 @@
 Filtering sensitive information
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
+.. currentmodule:: django.views.decorators.debug
+
 Error reports are really helpful for debugging errors, so it is generally
 useful to record as much relevant information about those errors as possible.
 For example, by default Django records the `full traceback`_ for the
@@ -246,30 +248,32 @@
             request.exception_reporter_filter = CustomExceptionReporterFilter()
         ...
 
+.. currentmodule:: django.views.debug
+
 Your custom filter class needs to inherit from
 :class:`django.views.debug.SafeExceptionReporterFilter` and may override the
 following methods:
 
-.. class:: django.views.debug.SafeExceptionReporterFilter
+.. class:: SafeExceptionReporterFilter
 
-.. method:: SafeExceptionReporterFilter.is_active(self, request)
+.. method:: SafeExceptionReporterFilter.is_active(request)
 
     Returns ``True`` to activate the filtering operated in the other methods.
     By default the filter is active if :setting:`DEBUG` is ``False``.
 
-.. method:: SafeExceptionReporterFilter.get_request_repr(self, request)
+.. method:: SafeExceptionReporterFilter.get_request_repr(request)
 
     Returns the representation string of the request object, that is, the
     value that would be returned by ``repr(request)``, except it uses the
     filtered dictionary of POST parameters as determined by
     :meth:`SafeExceptionReporterFilter.get_post_parameters`.
 
-.. method:: SafeExceptionReporterFilter.get_post_parameters(self, request)
+.. method:: SafeExceptionReporterFilter.get_post_parameters(request)
 
     Returns the filtered dictionary of POST parameters. By default it replaces
     the values of sensitive parameters with stars (`**********`).
 
-.. method:: SafeExceptionReporterFilter.get_traceback_frame_variables(self, request, tb_frame)
+.. method:: SafeExceptionReporterFilter.get_traceback_frame_variables(request, tb_frame)
 
     Returns the filtered dictionary of local variables for the given traceback
     frame. By default it replaces the values of sensitive variables with stars
diff --git a/lib/django-1.5/docs/howto/jython.txt b/lib/django-1.5/docs/howto/jython.txt
index 461a5d3..28cd2c9 100644
--- a/lib/django-1.5/docs/howto/jython.txt
+++ b/lib/django-1.5/docs/howto/jython.txt
@@ -4,77 +4,17 @@
 
 .. index:: Jython, Java, JVM
 
-.. admonition:: Python 2.6 support
+As of January 2014, the latest release of `django-jython`_ supports Django 1.3
+which is no longer supported (receiving fixes or security updates) by the
+Django Project. We therefore recommend that you do not try to run Django on
+Jython at this time.
 
-    Django 1.5 has dropped support for Python 2.5. Therefore, you have to use
-    a Jython 2.7 alpha release if you want to use Django 1.5 with Jython.
-    Please use Django 1.4 if you want to keep using Django on a stable Jython
-    version.
-
-Jython_ is an implementation of Python that runs on the Java platform (JVM).
-Django runs cleanly on Jython version 2.5 or later, which means you can deploy
-Django on any Java platform.
-
-This document will get you up and running with Django on top of Jython.
-
-.. _jython: http://www.jython.org/
-
-Installing Jython
-=================
-
-Django works with Jython versions 2.5b3 and higher. Download Jython at 
-http://www.jython.org/.
-
-Creating a servlet container
-============================
-
-If you just want to experiment with Django, skip ahead to the next section;
-Django includes a lightweight Web server you can use for testing, so you won't
-need to set up anything else until you're ready to deploy Django in production.
-
-If you want to use Django on a production site, use a Java servlet container,
-such as `Apache Tomcat`_. Full JavaEE applications servers such as `GlassFish`_
-or `JBoss`_ are also OK, if you need the extra features they include.
-
-.. _`Apache Tomcat`: http://tomcat.apache.org/
-.. _GlassFish: http://glassfish.java.net/
-.. _JBoss: http://www.jboss.org/
-
-Installing Django
-=================
-
-The next step is to install Django itself. This is exactly the same as
-installing Django on standard Python, so see
-:ref:`removing-old-versions-of-django` and :ref:`install-django-code` for
-instructions.
-
-Installing Jython platform support libraries
-============================================
-
-The `django-jython`_ project contains database backends and management commands
-for Django/Jython development. Note that the builtin Django backends won't work
-on top of Jython.
+The django-jython project is `seeking contributors`_ to help update its code for
+newer versions of Django. You can select an older version of this documentation
+to see the instructions we had for using Django with Jython. If django-jython
+is updated, please `file a ticket`_ and we'll be happy to update our
+documentation accordingly.
 
 .. _`django-jython`: http://code.google.com/p/django-jython/
-
-To install it, follow the `installation instructions`_ detailed on the project
-Web site. Also, read the `database backends`_ documentation there.
-
-.. _`installation instructions`: http://code.google.com/p/django-jython/wiki/Install
-.. _`database backends`: http://code.google.com/p/django-jython/wiki/DatabaseBackends
-
-Differences with Django on Jython
-=================================
-
-.. index:: JYTHONPATH
-
-At this point, Django on Jython should behave nearly identically to Django
-running on standard Python. However, are a few differences to keep in mind:
-
-* Remember to use the ``jython`` command instead of ``python``. The
-  documentation uses ``python`` for consistency, but if you're using Jython
-  you'll want to mentally replace ``python`` with ``jython`` every time it
-  occurs.
-
-* Similarly, you'll need to use the ``JYTHONPATH`` environment variable
-  instead of ``PYTHONPATH``.
+.. _`seeking contributors`: https://groups.google.com/d/topic/django-jython-dev/oZpKucQpz7I/discussion
+.. _`file a ticket`: https://code.djangoproject.com/newticket
diff --git a/lib/django-1.5/docs/howto/upgrade-version.txt b/lib/django-1.5/docs/howto/upgrade-version.txt
index fd018d4..9495456 100644
--- a/lib/django-1.5/docs/howto/upgrade-version.txt
+++ b/lib/django-1.5/docs/howto/upgrade-version.txt
@@ -47,7 +47,7 @@
 
 Once you're ready, it is time to :doc:`install the new Django version
 </topics/install>`. If you are using virtualenv_ and it is a major upgrade, you
-might want to set up a new environment will all the dependencies first.
+might want to set up a new environment with all the dependencies first.
 
 Exactly which steps you will need to take depends on your installation process.
 The most convenient way is to use pip_ with the ``--upgrade`` or ``-U`` flag:
diff --git a/lib/django-1.5/docs/index.txt b/lib/django-1.5/docs/index.txt
index b1686c6..2b16471 100644
--- a/lib/django-1.5/docs/index.txt
+++ b/lib/django-1.5/docs/index.txt
@@ -212,6 +212,7 @@
 applications and Django provides multiple protection tools and mechanisms:
 
 * :doc:`Security overview <topics/security>`
+* :doc:`Disclosed security issues in Django <releases/security>`
 * :doc:`Clickjacking protection <ref/clickjacking>`
 * :doc:`Cross Site Request Forgery protection <ref/contrib/csrf>`
 * :doc:`Cryptographic signing <topics/signing>`
@@ -272,7 +273,7 @@
 
 * :doc:`Conditional content processing <topics/conditional-view-processing>`
 * :doc:`Content types and generic relations <ref/contrib/contenttypes>`
-* :doc:`Databrowse <ref/contrib/databrowse>`
+* :doc:`Databrowse <ref/contrib/databrowse>` (deprecated)
 * :doc:`Flatpages <ref/contrib/flatpages>`
 * :doc:`Redirects <ref/contrib/redirects>`
 * :doc:`Signals <topics/signals>`
diff --git a/lib/django-1.5/docs/internals/contributing/writing-code/unit-tests.txt b/lib/django-1.5/docs/internals/contributing/writing-code/unit-tests.txt
index 1770e4a..eecca7c 100644
--- a/lib/django-1.5/docs/internals/contributing/writing-code/unit-tests.txt
+++ b/lib/django-1.5/docs/internals/contributing/writing-code/unit-tests.txt
@@ -33,7 +33,7 @@
 
 .. code-block:: bash
 
-    git clone git@github.com:django/django.git django-repo
+   $ git clone https://github.com/django/django.git django-repo
     cd django-repo/tests
     PYTHONPATH=..:$PYTHONPATH python ./runtests.py --settings=test_sqlite
 
@@ -65,8 +65,8 @@
 If you're using a backend that isn't SQLite, you will need to provide other
 details for each database:
 
-* The :setting:`USER` option for each of your databases needs to
-  specify an existing user account for the database.
+* The :setting:`USER` option needs to specify an existing user account
+  for the database.
 
 * The :setting:`PASSWORD` option needs to provide the password for
   the :setting:`USER` that has been specified.
@@ -138,6 +138,7 @@
 If you want to run the full suite of tests, you'll need to install a number of
 dependencies:
 
+*  numpy_
 *  PIL_
 *  py-bcrypt_
 *  PyYAML_
@@ -168,6 +169,7 @@
 Each of these dependencies is optional. If you're missing any of them, the
 associated tests will be skipped.
 
+.. _numpy: https://pypi.python.org/pypi/numpy
 .. _PIL: https://pypi.python.org/pypi/PIL
 .. _py-bcrypt: https://pypi.python.org/pypi/py-bcrypt/
 .. _PyYAML: http://pyyaml.org/wiki/PyYAML
diff --git a/lib/django-1.5/docs/internals/deprecation.txt b/lib/django-1.5/docs/internals/deprecation.txt
index dd2548b..6257451 100644
--- a/lib/django-1.5/docs/internals/deprecation.txt
+++ b/lib/django-1.5/docs/internals/deprecation.txt
@@ -7,103 +7,6 @@
 :ref:`deprecation policy <internal-release-deprecation-policy>`. More details
 about each item can often be found in the release notes of two versions prior.
 
-1.4
----
-
-See the :doc:`Django 1.2 release notes</releases/1.2>` for more details on
-these changes.
-
-* ``CsrfResponseMiddleware`` and ``CsrfMiddleware`` will be removed.  Use
-  the ``{% csrf_token %}`` template tag inside forms to enable CSRF
-  protection. ``CsrfViewMiddleware`` remains and is enabled by default.
-
-* The old imports for CSRF functionality (``django.contrib.csrf.*``),
-  which moved to core in 1.2, will be removed.
-
-* The ``django.contrib.gis.db.backend`` module will be removed in favor
-  of the specific backends.
-
-* ``SMTPConnection`` will be removed in favor of a generic Email backend API.
-
-* The many to many SQL generation functions on the database backends
-  will be removed.
-
-* The ability to use the ``DATABASE_*`` family of top-level settings to
-  define database connections will be removed.
-
-* The ability to use shorthand notation to specify a database backend
-  (i.e., ``sqlite3`` instead of ``django.db.backends.sqlite3``) will be
-  removed.
-
-* The ``get_db_prep_save``, ``get_db_prep_value`` and
-  ``get_db_prep_lookup`` methods will have to support multiple databases.
-
-* The ``Message`` model (in ``django.contrib.auth``), its related
-  manager in the ``User`` model (``user.message_set``), and the
-  associated methods (``user.message_set.create()`` and
-  ``user.get_and_delete_messages()``), will be removed.  The
-  :doc:`messages framework </ref/contrib/messages>` should be used
-  instead. The related ``messages`` variable returned by the
-  auth context processor will also be removed. Note that this
-  means that the admin application will depend on the messages
-  context processor.
-
-* Authentication backends will need to support the ``obj`` parameter for
-  permission checking. The ``supports_object_permissions`` attribute
-  will no longer be checked and can be removed from custom backends.
-
-* Authentication backends will need to support the ``AnonymousUser`` class
-  being passed to all methods dealing with permissions.  The
-  ``supports_anonymous_user`` variable will no longer be checked and can be
-  removed from custom backends.
-
-* The ability to specify a callable template loader rather than a
-  ``Loader`` class will be removed, as will the ``load_template_source``
-  functions that are included with the built in template loaders for
-  backwards compatibility.
-
-* ``django.utils.translation.get_date_formats()`` and
-  ``django.utils.translation.get_partial_date_formats()``. These functions
-  will be removed; use the locale-aware
-  ``django.utils.formats.get_format()`` to get the appropriate formats.
-
-* In ``django.forms.fields``, the constants: ``DEFAULT_DATE_INPUT_FORMATS``,
-  ``DEFAULT_TIME_INPUT_FORMATS`` and
-  ``DEFAULT_DATETIME_INPUT_FORMATS`` will be removed. Use
-  ``django.utils.formats.get_format()`` to get the appropriate
-  formats.
-
-* The ability to use a function-based test runners will be removed,
-  along with the ``django.test.simple.run_tests()`` test runner.
-
-* The ``views.feed()`` view and ``feeds.Feed`` class in
-  ``django.contrib.syndication`` will be removed. The class-based view
-  ``views.Feed`` should be used instead.
-
-* ``django.core.context_processors.auth``.  This release will
-  remove the old method in favor of the new method in
-  ``django.contrib.auth.context_processors.auth``.
-
-* The ``postgresql`` database backend will be removed, use the
-  ``postgresql_psycopg2`` backend instead.
-
-* The ``no`` language code will be removed and has been replaced by the
-  ``nb`` language code.
-
-* Authentication backends will need to define the boolean attribute
-  ``supports_inactive_user`` until version 1.5 when it will be assumed that
-  all backends will handle inactive users.
-
-* ``django.db.models.fields.XMLField`` will be removed. This was
-  deprecated as part of the 1.3 release. An accelerated deprecation
-  schedule has been used because the field hasn't performed any role
-  beyond that of a simple ``TextField`` since the removal of oldforms.
-  All uses of ``XMLField`` can be replaced with ``TextField``.
-
-* The undocumented ``mixin`` parameter to the ``open()`` method of
-  ``django.core.files.storage.Storage`` (and subclasses) will be removed.
-
-
 1.5
 ---
 
diff --git a/lib/django-1.5/docs/internals/release-process.txt b/lib/django-1.5/docs/internals/release-process.txt
index 8affddb..a8512dc 100644
--- a/lib/django-1.5/docs/internals/release-process.txt
+++ b/lib/django-1.5/docs/internals/release-process.txt
@@ -139,6 +139,20 @@
 * Documentation fixes will be applied to trunk, and, if easily backported, to
   the ``1.3.X`` branch.
 
+.. _lts-releases:
+
+Long-term support (LTS) releases
+================================
+
+Additionally, the Django team will occasionally designate certain releases
+to be "Long-term support" (LTS) releases. LTS releases will get security fixes
+applied for a guaranteed period of time, typically 3+ years, regardless of
+the pace of releases afterwards.
+
+The following releases have been designated for long-term support:
+
+* Django 1.4, supported until at least March 2015.
+
 .. _release-process:
 
 Release process
diff --git a/lib/django-1.5/docs/internals/security.txt b/lib/django-1.5/docs/internals/security.txt
index 96e1141..f5b5fde 100644
--- a/lib/django-1.5/docs/internals/security.txt
+++ b/lib/django-1.5/docs/internals/security.txt
@@ -1,3 +1,5 @@
+.. _internals-security:
+
 ==========================
 Django's security policies
 ==========================
@@ -56,6 +58,9 @@
   Django 1.3. Upon the release of Django 1.5, Django 1.3's security
   support will end.
 
+* :ref:`Long-term support (LTS) releases <lts-releases>` will receive
+  security updates for a specified period.
+
 When new releases are issued for security reasons, the accompanying
 notice will include a list of affected versions. This list is
 comprised solely of *supported* versions of Django: older versions may
@@ -124,6 +129,10 @@
 maintainers, and coordinate our own disclosure and resolution with
 theirs.
 
+The Django team also maintains an :doc:`archive of security issues
+disclosed in Django</releases/security>`.
+
+
 .. _security-notifications:
 
 Who receives advance notification
diff --git a/lib/django-1.5/docs/intro/reusable-apps.txt b/lib/django-1.5/docs/intro/reusable-apps.txt
index c261433..9418ffa 100644
--- a/lib/django-1.5/docs/intro/reusable-apps.txt
+++ b/lib/django-1.5/docs/intro/reusable-apps.txt
@@ -100,14 +100,14 @@
 =============================
 
 The current state of Python packaging is a bit muddled with various tools. For
-this tutorial, we're going to use distribute_ to build our package. It's a
-community-maintained fork of the older ``setuptools`` project. We'll also be
+this tutorial, we're going to use setuptools_ to build our package. It's the
+recommended packaging tool (merged with the ``distribute`` fork). We'll also be
 using `pip`_ to install and uninstall it. You should install these
 two packages now. If you need help, you can refer to :ref:`how to install
-Django with pip<installing-official-release>`. You can install ``distribute``
+Django with pip<installing-official-release>`. You can install ``setuptools``
 the same way.
 
-.. _distribute: http://pypi.python.org/pypi/distribute
+.. _setuptools: https://pypi.python.org/pypi/setuptools
 .. _pip: http://pypi.python.org/pypi/pip
 
 Packaging your app
@@ -163,17 +163,18 @@
     5. Visit http://127.0.0.1:8000/polls/ to participate in the poll.
 
 4. Create a ``django-polls/LICENSE`` file. Choosing a license is beyond the
-scope of this tutorial, but suffice it to say that code released publicly
-without a license is *useless*. Django and many Django-compatible apps are
-distributed under the BSD license; however, you're free to pick your own
-license. Just be aware that your licensing choice will affect who is able
-to use your code.
+   scope of this tutorial, but suffice it to say that code released publicly
+   without a license is *useless*. Django and many Django-compatible apps are
+   distributed under the BSD license; however, you're free to pick your own
+   license. Just be aware that your licensing choice will affect who is able
+   to use your code.
 
 5. Next we'll create a ``setup.py`` file which provides details about how to
-build and install the app.  A full explanation of this file is beyond the
-scope of this tutorial, but the `distribute docs
-<http://packages.python.org/distribute/setuptools.html>`_ have a good explanation.
-Create a file ``django-polls/setup.py`` with the following contents::
+   build and install the app. A full explanation of this file is beyond the
+   scope of this tutorial, but the `setuptools docs
+   <http://packages.python.org/setuptools/setuptools.html>`_ have a good
+   explanation. Create a file ``django-polls/setup.py`` with the following
+   contents::
 
     import os
     from setuptools import setup
@@ -208,15 +209,9 @@
         ],
     )
 
-.. admonition:: I thought you said we were going to use ``distribute``?
-
-    Distribute is a drop-in replacement for ``setuptools``. Even though we
-    appear to import from ``setuptools``, since we have ``distribute``
-    installed, it will override the import.
-
 6. Only Python modules and packages are included in the package by default. To
    include additional files, we'll need to create a ``MANIFEST.in`` file. The
-   distribute docs referred to in the previous step discuss this file in more
+   setuptools docs referred to in the previous step discuss this file in more
    details. To include the templates, the ``README.rst`` and our ``LICENSE``
    file, create a file ``django-polls/MANIFEST.in`` with the following
    contents::
diff --git a/lib/django-1.5/docs/intro/tutorial01.txt b/lib/django-1.5/docs/intro/tutorial01.txt
index d9e8319..8062f1c 100644
--- a/lib/django-1.5/docs/intro/tutorial01.txt
+++ b/lib/django-1.5/docs/intro/tutorial01.txt
@@ -181,6 +181,14 @@
     Full docs for the development server can be found in the
     :djadmin:`runserver` reference.
 
+.. admonition:: Automatic reloading of :djadmin:`runserver`
+
+    The development server automatically reloads Python code for each request
+    as needed. You don't need to restart the server for code changes to take
+    effect. However, some actions like adding files or compiling translation
+    files don't trigger a restart, so you'll have to restart the server in
+    these cases.
+
 Database setup
 --------------
 
diff --git a/lib/django-1.5/docs/intro/tutorial04.txt b/lib/django-1.5/docs/intro/tutorial04.txt
index 9f54243..ee0f0ad 100644
--- a/lib/django-1.5/docs/intro/tutorial04.txt
+++ b/lib/django-1.5/docs/intro/tutorial04.txt
@@ -33,7 +33,8 @@
   ``value`` of each radio button is the associated poll choice's ID. The
   ``name`` of each radio button is ``"choice"``. That means, when somebody
   selects one of the radio buttons and submits the form, it'll send the
-  POST data ``choice=3``. This is the basic concept of HTML forms.
+  POST data ``choice=#`` where # is the ID of the selected choice. This is the
+  basic concept of HTML forms.
 
 * We set the form's ``action`` to ``{% url 'polls:vote' poll.id %}``, and we
   set ``method="post"``. Using ``method="post"`` (as opposed to
@@ -266,7 +267,7 @@
 
 By default, the :class:`~django.views.generic.detail.DetailView` generic
 view uses a template called ``<app name>/<model name>_detail.html``.
-In our case, it'll use the template ``"polls/poll_detail.html"``. The
+In our case, it would use the template ``"polls/poll_detail.html"``. The
 ``template_name`` attribute is used to tell Django to use a specific
 template name instead of the autogenerated default template name. We
 also specify the ``template_name`` for the ``results`` list view --
diff --git a/lib/django-1.5/docs/intro/tutorial05.txt b/lib/django-1.5/docs/intro/tutorial05.txt
index 60a8996..23c9856 100644
--- a/lib/django-1.5/docs/intro/tutorial05.txt
+++ b/lib/django-1.5/docs/intro/tutorial05.txt
@@ -19,8 +19,8 @@
 examine the overall operation of the software - *does a sequence of user inputs
 on the site produce the desired result?* That's no different from the kind of
 testing you did earlier in :doc:`Tutorial 1 </intro/tutorial01>`, using the
-shell to examine the behavior of a method, or running the application and
-entering data to check how it behaves.
+:djadmin:`shell` to examine the behavior of a method, or running the
+application and entering data to check how it behaves.
 
 What's different in *automated* tests is that the testing work is done for
 you by the system. You create a set of tests once, and then as you make changes
@@ -137,7 +137,7 @@
 You can see this in the Admin; create a poll whose date lies in the future;
 you'll see that the ``Poll`` change list claims it was published recently.
 
-You can also see this using the shell::
+You can also see this using the :djadmin:`shell`::
 
     >>> import datetime
     >>> from django.utils import timezone
@@ -153,8 +153,8 @@
 Create a test to expose the bug
 -------------------------------
 
-What we've just done in the shell to test for the problem is exactly what we
-can do in an automated test, so let's turn that into an automated test.
+What we've just done in the :djadmin:`shell` to test for the problem is exactly
+what we can do in an automated test, so let's turn that into an automated test.
 
 The best place for an application's tests is in the application's ``tests.py``
 file - the testing system will look there for tests automatically.
@@ -318,11 +318,11 @@
 
 Django provides a test :class:`~django.test.client.Client` to simulate a user
 interacting with the code at the view level.  We can use it in ``tests.py``
-or even in the shell.
+or even in the :djadmin:`shell`.
 
-We will start again with the shell, where we need to do a couple of things that
-won't be necessary in ``tests.py``. The first is to set up the test environment
-in the shell::
+We will start again with the :djadmin:`shell`, where we need to do a couple of
+things that won't be necessary in ``tests.py``. The first is to set up the test
+environment in the :djadmin:`shell`::
 
     >>> from django.test.utils import setup_test_environment
     >>> setup_test_environment()
@@ -421,7 +421,7 @@
 the past and future, and checking that only those that have been published are
 listed.  You don't want to have to do that *every single time you make any
 change that might affect this* - so let's also create a test, based on our
-shell session above.
+:djadmin:`shell` session above.
 
 Add the following to ``polls/tests.py``::
 
diff --git a/lib/django-1.5/docs/ref/class-based-views/base.txt b/lib/django-1.5/docs/ref/class-based-views/base.txt
index 93cef1c..9deff15 100644
--- a/lib/django-1.5/docs/ref/class-based-views/base.txt
+++ b/lib/django-1.5/docs/ref/class-based-views/base.txt
@@ -110,6 +110,7 @@
     This view inherits methods and attributes from the following views:
 
     * :class:`django.views.generic.base.TemplateResponseMixin`
+    * :class:`django.views.generic.base.ContextMixin`
     * :class:`django.views.generic.base.View`
 
     **Method Flowchart**
@@ -145,8 +146,8 @@
 
     **Context**
 
-    * ``params``: The dictionary of keyword arguments captured from the URL
-      pattern that served the view.
+    * Populated (through :class:`~django.views.generic.base.ContextMixin`) with
+      the keyword arguments captured from the URL pattern that served the view.
 
 RedirectView
 ------------
@@ -178,6 +179,7 @@
 
     **Example views.py**::
 
+        from django.core.urlresolvers import reverse
         from django.shortcuts import get_object_or_404
         from django.views.generic.base import RedirectView
 
diff --git a/lib/django-1.5/docs/ref/class-based-views/generic-date-based.txt b/lib/django-1.5/docs/ref/class-based-views/generic-date-based.txt
index 6bff045..f3b7c41 100644
--- a/lib/django-1.5/docs/ref/class-based-views/generic-date-based.txt
+++ b/lib/django-1.5/docs/ref/class-based-views/generic-date-based.txt
@@ -619,8 +619,11 @@
 .. note::
 
     All of the generic views listed above have matching ``Base`` views that
-    only differ in that the they do not include the
-    :class:`~django.views.generic.detail.SingleObjectTemplateResponseMixin`:
+    only differ in that they do not include the
+    :class:`~django.views.generic.list.MultipleObjectTemplateResponseMixin`
+    (for the archive views) or
+    :class:`~django.views.generic.detail.SingleObjectTemplateResponseMixin`
+    (for the :class:`DateDetailView`):
 
     .. class:: BaseArchiveIndexView
 
diff --git a/lib/django-1.5/docs/ref/class-based-views/mixins-date-based.txt b/lib/django-1.5/docs/ref/class-based-views/mixins-date-based.txt
index 75f2a77..d1979b9 100644
--- a/lib/django-1.5/docs/ref/class-based-views/mixins-date-based.txt
+++ b/lib/django-1.5/docs/ref/class-based-views/mixins-date-based.txt
@@ -4,6 +4,11 @@
 
 .. currentmodule:: django.views.generic.dates
 
+.. note::
+    All the date formatting attributes in these mixins use
+    :func:`~time.strftime` format characters. Do not try to use the format
+    characters from the :ttag:`now` template tag as they are not compatible.
+
 YearMixin
 ---------
 
diff --git a/lib/django-1.5/docs/ref/contrib/admin/index.txt b/lib/django-1.5/docs/ref/contrib/admin/index.txt
index 26ee086..25cb1ea 100644
--- a/lib/django-1.5/docs/ref/contrib/admin/index.txt
+++ b/lib/django-1.5/docs/ref/contrib/admin/index.txt
@@ -1009,7 +1009,7 @@
     save/delete the object, they are not for veto purposes, rather they allow
     you to perform extra operations.
 
-.. method:: ModelAdmin.save_model(self, request, obj, form, change)
+.. method:: ModelAdmin.save_model(request, obj, form, change)
 
     The ``save_model`` method is given the ``HttpRequest``, a model instance,
     a ``ModelForm`` instance and a boolean value based on whether it is adding
@@ -1022,12 +1022,12 @@
                 obj.user = request.user
                 obj.save()
 
-.. method:: ModelAdmin.delete_model(self, request, obj)
+.. method:: ModelAdmin.delete_model(request, obj)
 
     The ``delete_model`` method is given the ``HttpRequest`` and a model
     instance. Use this method to do pre- or post-delete operations.
 
-.. method:: ModelAdmin.save_formset(self, request, form, formset, change)
+.. method:: ModelAdmin.save_formset(request, form, formset, change)
 
     The ``save_formset`` method is given the ``HttpRequest``, the parent
     ``ModelForm`` instance and a boolean value based on whether it is adding or
@@ -1044,7 +1044,7 @@
                     instance.save()
                 formset.save_m2m()
 
-.. method:: ModelAdmin.get_ordering(self, request)
+.. method:: ModelAdmin.get_ordering(request)
 
     .. versionadded:: 1.4
 
@@ -1060,7 +1060,7 @@
                 else:
                     return ['name']
 
-.. method:: ModelAdmin.save_related(self, request, form, formsets, change)
+.. method:: ModelAdmin.save_related(request, form, formsets, change)
 
     .. versionadded:: 1.4
 
@@ -1070,14 +1070,14 @@
     pre- or post-save operations for objects related to the parent. Note
     that at this point the parent object and its form have already been saved.
 
-.. method:: ModelAdmin.get_readonly_fields(self, request, obj=None)
+.. method:: ModelAdmin.get_readonly_fields(request, obj=None)
 
     The ``get_readonly_fields`` method is given the ``HttpRequest`` and the
     ``obj`` being edited (or ``None`` on an add form) and is expected to return
     a ``list`` or ``tuple`` of field names that will be displayed as read-only,
     as described above in the :attr:`ModelAdmin.readonly_fields` section.
 
-.. method:: ModelAdmin.get_prepopulated_fields(self, request, obj=None)
+.. method:: ModelAdmin.get_prepopulated_fields(request, obj=None)
 
     .. versionadded:: 1.4
 
@@ -1086,7 +1086,7 @@
     a ``dictionary``, as described above in the :attr:`ModelAdmin.prepopulated_fields`
     section.
 
-.. method:: ModelAdmin.get_list_display(self, request)
+.. method:: ModelAdmin.get_list_display(request)
 
     .. versionadded:: 1.4
 
@@ -1095,7 +1095,7 @@
     displayed on the changelist view as described above in the
     :attr:`ModelAdmin.list_display` section.
 
-.. method:: ModelAdmin.get_list_display_links(self, request, list_display)
+.. method:: ModelAdmin.get_list_display_links(request, list_display)
 
     .. versionadded:: 1.4
 
@@ -1105,14 +1105,14 @@
     changelist that will be linked to the change view, as described in the
     :attr:`ModelAdmin.list_display_links` section.
 
-.. method:: ModelAdmin.get_fieldsets(self, request, obj=None)
+.. method:: ModelAdmin.get_fieldsets(request, obj=None)
 
     The ``get_fieldsets`` method is given the ``HttpRequest`` and the ``obj``
     being edited (or ``None`` on an add form) and is expected to return a list
     of two-tuples, in which each two-tuple represents a ``<fieldset>`` on the
     admin form page, as described above in the :attr:`ModelAdmin.fieldsets` section.
 
-.. method:: ModelAdmin.get_list_filter(self, request)
+.. method:: ModelAdmin.get_list_filter(request)
 
     .. versionadded:: 1.5
 
@@ -1120,7 +1120,7 @@
     to return the same kind of sequence type as for the
     :attr:`~ModelAdmin.list_filter` attribute.
 
-.. method:: ModelAdmin.get_inline_instances(self, request, obj=None)
+.. method:: ModelAdmin.get_inline_instances(request, obj=None)
 
     .. versionadded:: 1.5
 
@@ -1130,7 +1130,7 @@
     objects, as described below in the :class:`~django.contrib.admin.InlineModelAdmin`
     section.
 
-.. method:: ModelAdmin.get_urls(self)
+.. method:: ModelAdmin.get_urls()
 
     The ``get_urls`` method on a ``ModelAdmin`` returns the URLs to be used for
     that ModelAdmin in the same way as a URLconf.  Therefore you can extend
@@ -1195,7 +1195,7 @@
 
         (r'^my_view/$', self.admin_site.admin_view(self.my_view, cacheable=True))
 
-.. method:: ModelAdmin.get_form(self, request, obj=None, **kwargs)
+.. method:: ModelAdmin.get_form(request, obj=None, **kwargs)
 
     Returns a :class:`~django.forms.ModelForm` class for use in the admin add
     and change views, see :meth:`add_view` and :meth:`change_view`.
@@ -1210,7 +1210,7 @@
                     self.exclude.append('field_to_hide')
                 return super(MyModelAdmin, self).get_form(request, obj, **kwargs)
 
-.. method:: ModelAdmin.get_formsets(self, request, obj=None)
+.. method:: ModelAdmin.get_formsets(request, obj=None)
 
     Yields :class:`InlineModelAdmin`\s for use in admin add and change views.
 
@@ -1227,7 +1227,7 @@
                         continue
                     yield inline.get_formset(request, obj)
 
-.. method:: ModelAdmin.formfield_for_foreignkey(self, db_field, request, **kwargs)
+.. method:: ModelAdmin.formfield_for_foreignkey(db_field, request, **kwargs)
 
     The ``formfield_for_foreignkey`` method on a ``ModelAdmin`` allows you to
     override the default formfield for a foreign keys field. For example, to
@@ -1242,7 +1242,7 @@
     This uses the ``HttpRequest`` instance to filter the ``Car`` foreign key
     field to only display the cars owned by the ``User`` instance.
 
-.. method:: ModelAdmin.formfield_for_manytomany(self, db_field, request, **kwargs)
+.. method:: ModelAdmin.formfield_for_manytomany(db_field, request, **kwargs)
 
     Like the ``formfield_for_foreignkey`` method, the
     ``formfield_for_manytomany`` method can be overridden to change the
@@ -1257,7 +1257,7 @@
                     kwargs["queryset"] = Car.objects.filter(owner=request.user)
                 return super(MyModelAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
 
-.. method:: ModelAdmin.formfield_for_choice_field(self, db_field, request, **kwargs)
+.. method:: ModelAdmin.formfield_for_choice_field(db_field, request, **kwargs)
 
     Like the ``formfield_for_foreignkey`` and ``formfield_for_manytomany``
     methods, the ``formfield_for_choice_field`` method can be overridden to
@@ -1276,13 +1276,13 @@
                         kwargs['choices'] += (('ready', 'Ready for deployment'),)
                 return super(MyModelAdmin, self).formfield_for_choice_field(db_field, request, **kwargs)
 
-.. method:: ModelAdmin.get_changelist(self, request, **kwargs)
+.. method:: ModelAdmin.get_changelist(request, **kwargs)
 
     Returns the ``Changelist`` class to be used for listing. By default,
     ``django.contrib.admin.views.main.ChangeList`` is used. By inheriting this
     class you can change the behavior of the listing.
 
-.. method:: ModelAdmin.get_changelist_form(self, request, **kwargs)
+.. method:: ModelAdmin.get_changelist_form(request, **kwargs)
 
     Returns a :class:`~django.forms.ModelForm` class for use in the ``Formset``
     on the changelist page. To use a custom form, for example::
@@ -1295,7 +1295,7 @@
             def get_changelist_form(self, request, **kwargs):
                 return MyForm
 
-.. method::  ModelAdmin.get_changelist_formset(self, request, **kwargs)
+.. method::  ModelAdmin.get_changelist_formset(request, **kwargs)
 
     Returns a :ref:`ModelFormSet <model-formsets>` class for use on the
     changelist page if :attr:`~ModelAdmin.list_editable` is used. To use a
@@ -1311,12 +1311,12 @@
                 kwargs['formset'] = MyAdminFormSet
                 return super(MyModelAdmin, self).get_changelist_formset(request, **kwargs)
 
-.. method:: ModelAdmin.has_add_permission(self, request)
+.. method:: ModelAdmin.has_add_permission(request)
 
     Should return ``True`` if adding an object is permitted, ``False``
     otherwise.
 
-.. method:: ModelAdmin.has_change_permission(self, request, obj=None)
+.. method:: ModelAdmin.has_change_permission(request, obj=None)
 
     Should return ``True`` if editing obj is permitted, ``False`` otherwise.
     If obj is ``None``, should return ``True`` or ``False`` to indicate whether
@@ -1324,7 +1324,7 @@
     will be interpreted as meaning that the current user is not permitted to
     edit any object of this type).
 
-.. method:: ModelAdmin.has_delete_permission(self, request, obj=None)
+.. method:: ModelAdmin.has_delete_permission(request, obj=None)
 
     Should return ``True`` if deleting obj is permitted, ``False`` otherwise.
     If obj is ``None``, should return ``True`` or ``False`` to indicate whether
@@ -1332,7 +1332,7 @@
     be interpreted as meaning that the current user is not permitted to delete
     any object of this type).
 
-.. method:: ModelAdmin.queryset(self, request)
+.. method:: ModelAdmin.queryset(request)
 
     The ``queryset`` method on a ``ModelAdmin`` returns a
     :class:`~django.db.models.query.QuerySet` of all model instances that
@@ -1368,11 +1368,11 @@
 Other methods
 ~~~~~~~~~~~~~
 
-.. method:: ModelAdmin.add_view(self, request, form_url='', extra_context=None)
+.. method:: ModelAdmin.add_view(request, form_url='', extra_context=None)
 
     Django view for the model instance addition page. See note below.
 
-.. method:: ModelAdmin.change_view(self, request, object_id, form_url='', extra_context=None)
+.. method:: ModelAdmin.change_view(request, object_id, form_url='', extra_context=None)
 
     Django view for the model instance edition page. See note below.
 
@@ -1380,17 +1380,17 @@
 
     The ``form_url`` parameter was added.
 
-.. method:: ModelAdmin.changelist_view(self, request, extra_context=None)
+.. method:: ModelAdmin.changelist_view(request, extra_context=None)
 
     Django view for the model instances change list/actions page. See note
     below.
 
-.. method:: ModelAdmin.delete_view(self, request, object_id, extra_context=None)
+.. method:: ModelAdmin.delete_view(request, object_id, extra_context=None)
 
     Django view for the model instance(s) deletion confirmation page. See note
     below.
 
-.. method:: ModelAdmin.history_view(self, request, object_id, extra_context=None)
+.. method:: ModelAdmin.history_view(request, object_id, extra_context=None)
 
     Django view for the page that shows the modification history for a given
     model instance.
@@ -1651,7 +1651,7 @@
     Specifies whether or not inline objects can be deleted in the inline.
     Defaults to ``True``.
 
-.. method:: InlineModelAdmin.get_formset(self, request, obj=None, **kwargs)
+.. method:: InlineModelAdmin.get_formset(request, obj=None, **kwargs)
 
     Returns a :class:`~django.forms.models.BaseInlineFormSet` class for use in
     admin add/change views. See the example for
@@ -2159,7 +2159,8 @@
 if you specifically wanted the admin view from the admin instance named
 ``custom``, you would need to call::
 
-    >>> change_url = urlresolvers.reverse('custom:polls_choice_change', args=(c.id,))
+    >>> change_url = urlresolvers.reverse('admin:polls_choice_change',
+    ...                                   args=(c.id,), current_app='custom')
 
 For more details, see the documentation on :ref:`reversing namespaced URLs
 <topics-http-reversing-url-namespaces>`.
diff --git a/lib/django-1.5/docs/ref/contrib/auth.txt b/lib/django-1.5/docs/ref/contrib/auth.txt
index 838784c..c5b3873 100644
--- a/lib/django-1.5/docs/ref/contrib/auth.txt
+++ b/lib/django-1.5/docs/ref/contrib/auth.txt
@@ -128,6 +128,12 @@
         the :attr:`~django.contrib.auth.models.User.last_name`, with a space in
         between.
 
+    .. method:: get_short_name()
+
+        .. versionadded:: 1.5
+
+        Returns the :attr:`~django.contrib.auth.models.User.first_name`.
+
     .. method:: set_password(raw_password)
 
         Sets the user's password to the given raw string, taking care of the
@@ -254,7 +260,7 @@
 
         See :ref:`Creating users <topics-auth-creating-users>` for example usage.
 
-    .. method:: create_superuser(self, username, email, password, **extra_fields)
+    .. method:: create_superuser(username, email, password, **extra_fields)
 
         Same as :meth:`create_user`, but sets :attr:`~models.User.is_staff` and
         :attr:`~models.User.is_superuser` to ``True``.
diff --git a/lib/django-1.5/docs/ref/contrib/comments/moderation.txt b/lib/django-1.5/docs/ref/contrib/comments/moderation.txt
index a7138dd..dcd8b79 100644
--- a/lib/django-1.5/docs/ref/contrib/comments/moderation.txt
+++ b/lib/django-1.5/docs/ref/contrib/comments/moderation.txt
@@ -210,7 +210,7 @@
     :func:`moderator.unregister` methods detailed above, the following methods
     on :class:`Moderator` can be overridden to achieve customized behavior:
 
-    .. method:: connect
+    .. method:: connect()
 
         Determines how moderation is set up globally. The base
         implementation in
diff --git a/lib/django-1.5/docs/ref/contrib/csrf.txt b/lib/django-1.5/docs/ref/contrib/csrf.txt
index 22e7547..a5f4492 100644
--- a/lib/django-1.5/docs/ref/contrib/csrf.txt
+++ b/lib/django-1.5/docs/ref/contrib/csrf.txt
@@ -448,7 +448,7 @@
 View needs protection for one path
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-A view needs CRSF protection under one set of conditions only, and mustn't have
+A view needs CSRF protection under one set of conditions only, and mustn't have
 it for the rest of the time.
 
 Solution: use :func:`~django.views.decorators.csrf.csrf_exempt` for the whole
diff --git a/lib/django-1.5/docs/ref/contrib/databrowse.txt b/lib/django-1.5/docs/ref/contrib/databrowse.txt
index aff8257..acfe851 100644
--- a/lib/django-1.5/docs/ref/contrib/databrowse.txt
+++ b/lib/django-1.5/docs/ref/contrib/databrowse.txt
@@ -6,7 +6,8 @@
    :synopsis: Databrowse is a Django application that lets you browse your data.
 
 .. deprecated:: 1.4
-    This module has been deprecated.
+    This module has been deprecated. It's available as a `third-party package
+    <https://pypi.python.org/pypi/django-databrowse>`_.
 
 Databrowse is a Django application that lets you browse your data.
 
diff --git a/lib/django-1.5/docs/ref/contrib/formtools/form-preview.txt b/lib/django-1.5/docs/ref/contrib/formtools/form-preview.txt
index 011e72c..3c891e2 100644
--- a/lib/django-1.5/docs/ref/contrib/formtools/form-preview.txt
+++ b/lib/django-1.5/docs/ref/contrib/formtools/form-preview.txt
@@ -110,7 +110,7 @@
 Advanced ``FormPreview`` methods
 ================================
 
-.. method:: FormPreview.process_preview
+.. method:: FormPreview.process_preview()
 
     Given a validated form, performs any extra processing before displaying the
     preview page, and saves any extra data in context.
diff --git a/lib/django-1.5/docs/ref/contrib/formtools/form-wizard.txt b/lib/django-1.5/docs/ref/contrib/formtools/form-wizard.txt
index 18586ed..b2bcad6 100644
--- a/lib/django-1.5/docs/ref/contrib/formtools/form-wizard.txt
+++ b/lib/django-1.5/docs/ref/contrib/formtools/form-wizard.txt
@@ -229,7 +229,7 @@
 Hooking the wizard into a URLconf
 ---------------------------------
 
-.. method:: WizardView.as_view
+.. method:: WizardView.as_view()
 
 Finally, we need to specify which forms to use in the wizard, and then
 deploy the new :class:`WizardView` object at a URL in the ``urls.py``. The
diff --git a/lib/django-1.5/docs/ref/contrib/gis/gdal.txt b/lib/django-1.5/docs/ref/contrib/gis/gdal.txt
index c680306..3f26606 100644
--- a/lib/django-1.5/docs/ref/contrib/gis/gdal.txt
+++ b/lib/django-1.5/docs/ref/contrib/gis/gdal.txt
@@ -459,19 +459,19 @@
 
    Constructs a :class:`Polygon` from the given bounding-box (a 4-tuple).
 
-   .. method:: __len__
+   .. method:: __len__()
 
    Returns the number of points in a :class:`LineString`, the
    number of rings in a :class:`Polygon`, or the number of geometries in a
    :class:`GeometryCollection`. Not applicable to other geometry types.
 
-   .. method:: __iter__
+   .. method:: __iter__()
 
    Iterates over the points in a :class:`LineString`, the rings in a
    :class:`Polygon`, or the geometries in a :class:`GeometryCollection`.
    Not applicable to other geometry types.
 
-   .. method:: __getitem__
+   .. method:: __getitem__()
 
    Returns the point at the specified index for a :class:`LineString`, the
    interior ring at the specified index for a :class:`Polygon`, or the geometry
@@ -677,7 +677,7 @@
    Returns ``True`` if this geometry overlaps the other, otherwise returns
    ``False``.
 
-   .. method:: boundary
+   .. method:: boundary()
 
    The boundary of this geometry, as a new :class:`OGRGeometry` object.
 
@@ -686,22 +686,22 @@
    The smallest convex polygon that contains this geometry, as a new
    :class:`OGRGeometry` object.
 
-   .. method:: difference
+   .. method:: difference()
 
    Returns the region consisting of the difference of this geometry and
    the other, as a new :class:`OGRGeometry` object.
 
-   .. method:: intersection
+   .. method:: intersection()
 
    Returns the region consisting of the intersection of this geometry and
    the other, as a new :class:`OGRGeometry` object.
 
-   .. method:: sym_difference
+   .. method:: sym_difference()
 
    Returns the region consisting of the symmetric difference of this
    geometry and the other, as a new :class:`OGRGeometry` object.
 
-   .. method:: union
+   .. method:: union()
 
    Returns the region consisting of the union of this geometry and
    the other, as a new :class:`OGRGeometry` object.
@@ -873,7 +873,7 @@
    A string representing this envelope as a polygon in WKT format.
 
 
-   .. method:: expand_to_include(self, *args)
+   .. method:: expand_to_include(*args)
 
 Coordinate System Objects
 =========================
diff --git a/lib/django-1.5/docs/ref/contrib/gis/geos.txt b/lib/django-1.5/docs/ref/contrib/gis/geos.txt
index 4d44638..e3c7ce7 100644
--- a/lib/django-1.5/docs/ref/contrib/gis/geos.txt
+++ b/lib/django-1.5/docs/ref/contrib/gis/geos.txt
@@ -311,7 +311,7 @@
 .. attribute:: GEOSGeometry.ogr
 
 Returns an :class:`~django.contrib.gis.gdal.OGRGeometry` object
-correspondg to the GEOS geometry.
+corresponding to the GEOS geometry.
 
 .. note::
 
@@ -565,7 +565,7 @@
 
 .. method:: GEOSGeometry.transform(ct, clone=False)
 
-Transforms the geometry according to the given coordinate transformation paramter
+Transforms the geometry according to the given coordinate transformation parameter
 (``ct``), which may be an integer SRID, spatial reference WKT string,
 a PROJ.4 string, a :class:`~django.contrib.gis.gdal.SpatialReference` object, or a
 :class:`~django.contrib.gis.gdal.CoordTransform` object. By default, the geometry
diff --git a/lib/django-1.5/docs/ref/contrib/gis/install/spatialite.txt b/lib/django-1.5/docs/ref/contrib/gis/install/spatialite.txt
index 941d559..9988788 100644
--- a/lib/django-1.5/docs/ref/contrib/gis/install/spatialite.txt
+++ b/lib/django-1.5/docs/ref/contrib/gis/install/spatialite.txt
@@ -56,7 +56,7 @@
 __ http://www.sqlite.org/rtree.html
 __ http://www.sqlite.org/download.html
 
-.. _spatialitebuild :
+.. _spatialitebuild:
 
 SpatiaLite library (``libspatialite``) and tools (``spatialite``)
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -74,15 +74,17 @@
 ``configure`` script, make, and install for the SpatiaLite library::
 
     $ cd libspatialite-amalgamation-2.3.1
-    $ ./configure # May need to modified, see notes below.
+    $ ./configure # May need to be modified, see notes below.
     $ make
     $ sudo make install
-    $ cd .... _spatialite
+    $ cd ..
+
+.. _spatialite_tools:
 
 Finally, do the same for the SpatiaLite tools::
 
     $ cd spatialite-tools-2.3.1
-    $ ./configure # May need to modified, see notes below.
+    $ ./configure # May need to be modified, see notes below.
     $ make
     $ sudo make install
     $ cd ..
diff --git a/lib/django-1.5/docs/ref/databases.txt b/lib/django-1.5/docs/ref/databases.txt
index f4656ad..3578a93 100644
--- a/lib/django-1.5/docs/ref/databases.txt
+++ b/lib/django-1.5/docs/ref/databases.txt
@@ -156,8 +156,8 @@
 
 Until MySQL 5.5.4, the default engine was MyISAM_ [#]_. The main drawbacks of
 MyISAM are that it doesn't support transactions or enforce foreign-key
-constraints. On the plus side, it's currently the only engine that supports
-full-text indexing and searching.
+constraints. On the plus side, it was the only engine that supported full-text
+indexing and searching until MySQL 5.6.4.
 
 Since MySQL 5.5.5, the default storage engine is InnoDB_. This engine is fully
 transactional and supports foreign key references. It's probably the best
@@ -247,7 +247,7 @@
 .. _documented thoroughly: http://dev.mysql.com/doc/refman/5.0/en/charset.html
 
 By default, with a UTF-8 database, MySQL will use the
-``utf8_general_ci_swedish`` collation. This results in all string equality
+``utf8_general_ci`` collation. This results in all string equality
 comparisons being done in a *case-insensitive* manner. That is, ``"Fred"`` and
 ``"freD"`` are considered equal at the database level. If you have a unique
 constraint on a field, it would be illegal to try to insert both ``"aa"`` and
@@ -284,7 +284,7 @@
 recommended solution.
 
 Should you decide to use ``utf8_bin`` collation for some of your tables with
-MySQLdb 1.2.1p2 or 1.2.2, you should still use ``utf8_collation_ci_swedish``
+MySQLdb 1.2.1p2 or 1.2.2, you should still use ``utf8_general_ci``
 (the default) collation for the ``django.contrib.sessions.models.Session``
 table (usually called ``django_session``) and the
 ``django.contrib.admin.models.LogEntry`` table (usually called
@@ -429,6 +429,22 @@
 statement. If ``select_for_update()`` is used with ``nowait=True`` then a
 ``DatabaseError`` will be raised.
 
+Automatic typecasting can cause unexpected results
+--------------------------------------------------
+
+When performing a query on a string type, but with an integer value, MySQL will
+coerce the types of all values in the table to an integer before performing the
+comparison. If your table contains the values ``'abc'``, ``'def'`` and you
+query for ``WHERE mycolumn=0``, both rows will match. Similarly, ``WHERE mycolumn=1``
+will match the value ``'abc1'``. Therefore, string type fields included in Django
+will always cast the value to a string before using it in a query.
+
+If you implement custom model fields that inherit from :class:`~django.db.models.Field`
+directly, are overriding :meth:`~django.db.models.Field.get_prep_value`, or use
+:meth:`extra() <django.db.models.query.QuerySet.extra>` or
+:meth:`raw() <django.db.models.Manager.raw>`, you should ensure that you
+perform the appropriate typecasting.
+
 .. _sqlite-notes:
 
 SQLite notes
@@ -699,6 +715,22 @@
 Oracle imposes a name length limit of 30 characters. To accommodate this, the
 backend truncates database identifiers to fit, replacing the final four
 characters of the truncated name with a repeatable MD5 hash value.
+Additionally, the backend converts database identifiers to all-uppercase.
+
+To prevent these transformations (this is usually required only when dealing
+with legacy databases or accessing tables which belong to other users), use
+a quoted name as the value for ``db_table``::
+
+    class LegacyModel(models.Model):
+        class Meta:
+            db_table = '"name_left_in_lowercase"'
+
+    class ForeignModel(models.Model):
+        class Meta:
+            db_table = '"OTHER_USER"."NAME_ONLY_SEEMS_OVER_30"'
+
+Quoted names can also be used with Django's other supported database 
+backends; except for Oracle, however, the quotes have no effect.
 
 When running syncdb, an ``ORA-06552`` error may be encountered if
 certain Oracle keywords are used as the name of a model field or the
diff --git a/lib/django-1.5/docs/ref/django-admin.txt b/lib/django-1.5/docs/ref/django-admin.txt
index 4552440..07a7bb1 100644
--- a/lib/django-1.5/docs/ref/django-admin.txt
+++ b/lib/django-1.5/docs/ref/django-admin.txt
@@ -664,6 +664,8 @@
 
 The development server automatically reloads Python code for each request, as
 needed. You don't need to restart the server for code changes to take effect.
+However, some actions like adding files or compiling translation files don't
+trigger a restart, so you'll have to restart the server in these cases.
 
 When you start the server, and each time you change Python code while the
 server is running, the server will validate all of your installed models. (See
diff --git a/lib/django-1.5/docs/ref/files/storage.txt b/lib/django-1.5/docs/ref/files/storage.txt
index b974251..1216e85 100644
--- a/lib/django-1.5/docs/ref/files/storage.txt
+++ b/lib/django-1.5/docs/ref/files/storage.txt
@@ -39,7 +39,7 @@
     .. note::
 
         The ``FileSystemStorage.delete()`` method will not raise
-        raise an exception if the given file name does not exist.
+        an exception if the given file name does not exist.
 
 The Storage Class
 -----------------
diff --git a/lib/django-1.5/docs/ref/forms/api.txt b/lib/django-1.5/docs/ref/forms/api.txt
index e6b0d3e..98c6d91 100644
--- a/lib/django-1.5/docs/ref/forms/api.txt
+++ b/lib/django-1.5/docs/ref/forms/api.txt
@@ -324,7 +324,7 @@
 ``as_p()``
 ~~~~~~~~~~
 
-.. method:: Form.as_p
+.. method:: Form.as_p()
 
     ``as_p()`` renders the form as a series of ``<p>`` tags, with each ``<p>``
     containing one field::
@@ -341,7 +341,7 @@
 ``as_ul()``
 ~~~~~~~~~~~
 
-.. method:: Form.as_ul
+.. method:: Form.as_ul()
 
     ``as_ul()`` renders the form as a series of ``<li>`` tags, with each
     ``<li>`` containing one field. It does *not* include the ``<ul>`` or
@@ -360,7 +360,7 @@
 ``as_table()``
 ~~~~~~~~~~~~~~
 
-.. method:: Form.as_table
+.. method:: Form.as_table()
 
     Finally, ``as_table()`` outputs the form as an HTML ``<table>``. This is
     exactly the same as ``print``. In fact, when you ``print`` a form object,
@@ -752,7 +752,7 @@
 Testing for multipart forms
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. method:: Form.is_multipart
+.. method:: Form.is_multipart()
 
 If you're writing reusable views or templates, you may not know ahead of time
 whether your form is a multipart form or not. The ``is_multipart()`` method
diff --git a/lib/django-1.5/docs/ref/forms/validation.txt b/lib/django-1.5/docs/ref/forms/validation.txt
index 6690ada..060fa3b 100644
--- a/lib/django-1.5/docs/ref/forms/validation.txt
+++ b/lib/django-1.5/docs/ref/forms/validation.txt
@@ -82,6 +82,10 @@
   cleaned data if you override this method (by default, ``Form.clean()``
   just returns ``self.cleaned_data``).
 
  Since the field validation methods have been run by the time ``clean()`` is
  called, you also have access to the form's ``errors`` attribute which
  contains all the errors raised by previous steps.
+
   Note that any errors raised by your ``Form.clean()`` override will not
   be associated with any field in particular. They go into a special
   "field" (called ``__all__``), which you can access via the
@@ -98,7 +102,8 @@
 for each field in the form (in the order they are declared in the form
 definition), the ``Field.clean()`` method (or its override) is run, then
 ``clean_<fieldname>()``. Finally, once those two methods are run for every
-field, the ``Form.clean()`` method, or its override, is executed.
+field, the ``Form.clean()`` method, or its override, is executed, regardless
+of whether the previous methods have raised errors or not.
 
 Examples of each of these methods are provided below.
 
@@ -107,15 +112,6 @@
 field-specific cleaning method is not called. However, the cleaning methods
 for all remaining fields are still executed.
 
-The ``clean()`` method for the ``Form`` class or subclass is always run. If
-that method raises a ``ValidationError``, ``cleaned_data`` will be an empty
-dictionary.
-
-The previous paragraph means that if you are overriding ``Form.clean()``, you
-should iterate through ``self.cleaned_data.items()``, possibly considering the
-``_errors`` dictionary attribute on the form as well. In this way, you will
-already know which fields have passed their individual validation requirements.
-
 .. _described later:
 
 Form subclasses and modifying field errors
@@ -176,9 +172,12 @@
 ~~~~~~~~~~~~~~~~
 
 Django's form (and model) fields support use of simple utility functions and
-classes known as validators. These can be passed to a field's constructor, via
-the field's ``validators`` argument, or defined on the Field class itself with
-the ``default_validators`` attribute.
+classes known as validators. A validator is merely a callable object or
+function that takes a value and simply returns nothing if the value is valid or
+raises a :exc:`~django.core.exceptions.ValidationError` if not. These can be
+passed to a field's constructor, via the field's ``validators`` argument, or
+defined on the :class:`~django.forms.Field` class itself with the
+``default_validators`` attribute.
 
 Simple validators can be used to validate values inside the field, let's have
 a look at Django's ``EmailField``::
@@ -200,6 +199,13 @@
     email = forms.CharField(validators=[validators.validate_email],
             error_messages={'invalid': _('Enter a valid email address.')})
 
+Common cases such as validating against an email or a regular expression can be
+handled using existing validator classes available in Django. For example,
+``validators.validate_slug`` is an instance of
+a :class:`~django.core.validators.RegexValidator` constructed with the first
+argument being the pattern: ``^[-a-zA-Z0-9_]+$``. See the section on
+:doc:`writing validators </ref/validators>` to see a list of what is already
+available and for an example of how to write a validator.
 
 Form field default cleaning
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -272,7 +278,7 @@
 Cleaning and validating fields that depend on each other
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. method:: django.forms.Form.clean
+.. method:: django.forms.Form.clean()
 
 Suppose we add another requirement to our contact form: if the ``cc_myself``
 field is ``True``, the ``subject`` must contain the word ``"help"``. We are
diff --git a/lib/django-1.5/docs/ref/forms/widgets.txt b/lib/django-1.5/docs/ref/forms/widgets.txt
index 13d2fd5..3cc2470 100644
--- a/lib/django-1.5/docs/ref/forms/widgets.txt
+++ b/lib/django-1.5/docs/ref/forms/widgets.txt
@@ -216,7 +216,7 @@
         The 'value' given is not guaranteed to be valid input, therefore
         subclass implementations should program defensively.
 
-    .. method:: value_from_datadict(self, data, files, name)
+    .. method:: value_from_datadict(data, files, name)
 
         Given a dictionary of data and this widget's name, returns the value
         of this widget. Returns ``None`` if a value wasn't provided.
diff --git a/lib/django-1.5/docs/ref/models/fields.txt b/lib/django-1.5/docs/ref/models/fields.txt
index 3ac0acf..187aeee 100644
--- a/lib/django-1.5/docs/ref/models/fields.txt
+++ b/lib/django-1.5/docs/ref/models/fields.txt
@@ -550,7 +550,8 @@
     Optional. A storage object, which handles the storage and retrieval of your
     files. See :doc:`/topics/files` for details on how to provide this object.
 
-The default form widget for this field is a :class:`~django.forms.FileInput`.
+The default form widget for this field is a
+:class:`~django.forms.ClearableFileInput`.
 
 Using a :class:`FileField` or an :class:`ImageField` (see below) in a model
 takes a few steps:
@@ -798,6 +799,9 @@
 columns in your database. As with other fields, you can change the maximum
 length using the :attr:`~CharField.max_length` argument.
 
+The default form widget for this field is a
+:class:`~django.forms.ClearableFileInput`.
+
 ``IntegerField``
 ----------------
 
@@ -1019,14 +1023,13 @@
 .. attribute:: ForeignKey.limit_choices_to
 
     A dictionary of lookup arguments and values (see :doc:`/topics/db/queries`)
-    that limit the available admin or ModelForm choices for this object. Use
-    this with functions from the Python ``datetime`` module to limit choices of
-    objects by date. For example::
+    that limit the available admin or :class:`ModelForm <django.forms.ModelForm>`
+    choices for this object. For example::
 
-        limit_choices_to = {'pub_date__lte': datetime.date.today}
+        staff_member = models.ForeignKey(User, limit_choices_to={'is_staff': True})
 
-    only allows the choice of related objects with a ``pub_date`` before the
-    current date to be chosen.
+    causes the corresponding field on the ``ModelForm`` to list only ``Users``
+    that have ``is_staff=True``.
 
     Instead of a dictionary this can also be a :class:`~django.db.models.Q`
     object for more :ref:`complex queries <complex-lookups-with-q>`. However,
@@ -1223,6 +1226,27 @@
 including all the options regarding :ref:`recursive <recursive-relationships>`
 and :ref:`lazy <lazy-relationships>` relationships.
 
+If you do not specify the :attr:`~ForeignKey.related_name` argument for
+the ``OneToOneField``, Django will use the lower-case name of the current model
+as default value.
+
+With the following example::
+
+    from django.db import models
+    from django.contrib.auth.models import User
+
+    class MySpecialUser(models.Model):
+        user = models.OneToOneField(User)
+        supervisor = models.OneToOneField(User, related_name='supervisor_of')
+
+your resulting ``User`` model will have the following attributes::
+
+    >>> user = User.objects.get(pk=1)
+    >>> hasattr(user, 'myspecialuser')
+    True
+    >>> hasattr(user, 'supervisor_of')
+    True
+
 .. _onetoone-arguments:
 
 Additionally, ``OneToOneField`` accepts all of the extra arguments
@@ -1235,3 +1259,6 @@
     link back to the parent class, rather than the extra
     ``OneToOneField`` which would normally be implicitly created by
     subclassing.
+
+See :doc:`One-to-one relationships </topics/db/examples/one_to_one>` for usage
+examples of ``OneToOneField``.
diff --git a/lib/django-1.5/docs/ref/models/options.txt b/lib/django-1.5/docs/ref/models/options.txt
index c2a3430..e2a502c 100644
--- a/lib/django-1.5/docs/ref/models/options.txt
+++ b/lib/django-1.5/docs/ref/models/options.txt
@@ -67,6 +67,18 @@
     the table name via ``db_table``, particularly if you are using the MySQL
     backend. See the :ref:`MySQL notes <mysql-notes>` for more details.
 
+.. admonition:: Table name quoting for Oracle
+
+   In order to meet the 30-char limitation Oracle has on table names,
+   and match the usual conventions for Oracle databases, Django may shorten
+   table names and turn them all-uppercase. To prevent such transformations,
+   use a quoted name as the value for ``db_table``::
+
+       db_table = '"name_left_in_lowercase"'
+
+   Such quoted names can also be used with Django's other supported database
+   backends; except for Oracle, however, the quotes have no effect. See the
+   :ref:`Oracle notes <oracle-notes>` for more details.
 
 ``db_tablespace``
 -----------------
diff --git a/lib/django-1.5/docs/ref/models/querysets.txt b/lib/django-1.5/docs/ref/models/querysets.txt
index ac7ea2d..6aa2a96 100644
--- a/lib/django-1.5/docs/ref/models/querysets.txt
+++ b/lib/django-1.5/docs/ref/models/querysets.txt
@@ -1068,6 +1068,16 @@
 
       Entry.objects.extra(where=['headline=%s'], params=['Lennon'])
 
+.. warning::
+
+    If you are performing queries on MySQL, note that MySQL's silent type coercion
+    may cause unexpected results when mixing types. If you query on a string
+    type column, but with an integer value, MySQL will coerce the types of all values
+    in the table to an integer before performing the comparison. For example, if your
+    table contains the values ``'abc'``, ``'def'`` and you query for ``WHERE mycolumn=0``,
+    both rows will match. To prevent this, perform the correct typecasting
+    before using the value in a query.
+
 defer
 ~~~~~
 
@@ -1397,11 +1407,11 @@
 object won't appear in a subsequent :meth:`~django.db.models.query.QuerySet.get`
 call.
 
-Finally, a word on using ``get_or_create()`` in Django views: please make sure
-to use it only in ``POST`` requests unless you have a good reason not to
-``GET`` requests shouldn't have any effect on data; use ``POST`` whenever a
-request to a page as a side effect on your data. For more, see `Safe methods`_
-in the HTTP spec.
+Finally, a word on using ``get_or_create()`` in Django views. Please make sure
+to use it only in ``POST`` requests unless you have a good reason not to.
+``GET`` requests shouldn't have any effect on data. Instead, use ``POST``
+whenever a request to a page has a side effect on your data. For more, see
+`Safe methods`_ in the HTTP spec.
 
 .. _Safe methods: http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.1.1
 
diff --git a/lib/django-1.5/docs/ref/settings.txt b/lib/django-1.5/docs/ref/settings.txt
index 1db9b40..0f04629 100644
--- a/lib/django-1.5/docs/ref/settings.txt
+++ b/lib/django-1.5/docs/ref/settings.txt
@@ -469,8 +469,10 @@
 
 If you're using PostgreSQL, by default (empty :setting:`HOST`), the connection
 to the database is done through UNIX domain sockets ('local' lines in
-``pg_hba.conf``). If you want to connect through TCP sockets, set
-:setting:`HOST` to 'localhost' or '127.0.0.1' ('host' lines in ``pg_hba.conf``).
+``pg_hba.conf``). If your UNIX domain socket is not in the standard location,
+use the same value of ``unix_socket_directory`` from ``postgresql.conf``.
+If you want to connect through TCP sockets, set :setting:`HOST` to 'localhost'
+or '127.0.0.1' ('host' lines in ``pg_hba.conf``).
 On Windows, you should always define :setting:`HOST`, as UNIX domain sockets
 are not available.
 
@@ -1277,29 +1279,19 @@
 Generally, the default value should suffice. Only set this setting if you want
 to restrict language selection to a subset of the Django-provided languages.
 
-If you define a custom :setting:`LANGUAGES` setting, it's OK to mark the
-languages as translation strings (as in the default value referred to above)
--- but use a "dummy" ``gettext()`` function, not the one in
-``django.utils.translation``. You should *never* import
-``django.utils.translation`` from within your settings file, because that
-module in itself depends on the settings, and that would cause a circular
-import.
+If you define a custom :setting:`LANGUAGES` setting, you can mark the
+language names as translation strings using the
+:func:`~django.utils.translation.ugettext_lazy` function.
 
-The solution is to use a "dummy" ``gettext()`` function. Here's a sample
-settings file::
+Here's a sample settings file::
 
-    gettext = lambda s: s
+    from django.utils.translation import ugettext_lazy as _
 
     LANGUAGES = (
-        ('de', gettext('German')),
-        ('en', gettext('English')),
+        ('de', _('German')),
+        ('en', _('English')),
     )
 
-With this arrangement, ``django-admin.py makemessages`` will still find and
-mark these strings for translation, but the translation won't happen at
-runtime -- so you'll have to remember to wrap the languages in the *real*
-``gettext()`` in any code that uses :setting:`LANGUAGES` at runtime.
-
 .. setting:: LOCALE_PATHS
 
 LOCALE_PATHS
diff --git a/lib/django-1.5/docs/ref/signals.txt b/lib/django-1.5/docs/ref/signals.txt
index 7cce1b7..bb6eb10 100644
--- a/lib/django-1.5/docs/ref/signals.txt
+++ b/lib/django-1.5/docs/ref/signals.txt
@@ -288,7 +288,7 @@
     >>> t = Topping.objects.create(...)
     >>> p.toppings.add(t)
 
-the arguments sent to a :data:`m2m_changed` handler (``topppings_changed`` in
+the arguments sent to a :data:`m2m_changed` handler (``toppings_changed`` in
 the example above) would be:
 
 ==============  ============================================================
diff --git a/lib/django-1.5/docs/ref/template-response.txt b/lib/django-1.5/docs/ref/template-response.txt
index 5c13ec7..952a8d4 100644
--- a/lib/django-1.5/docs/ref/template-response.txt
+++ b/lib/django-1.5/docs/ref/template-response.txt
@@ -106,7 +106,7 @@
 
     Override this method in order to customize template rendering.
 
-.. method:: SimpleTemplateResponse.add_post_render_callback
+.. method:: SimpleTemplateResponse.add_post_render_callback()
 
     Add a callback that will be invoked after rendering has taken
     place. This hook can be used to defer certain processing
@@ -139,10 +139,10 @@
 
 .. class:: TemplateResponse()
 
-   TemplateResponse is a subclass of
-   :class:`~django.template.response.SimpleTemplateResponse` that uses
-   a :class:`~django.template.RequestContext` instead of
-   a :class:`~django.template.Context`.
+    ``TemplateResponse`` is a subclass of
+    :class:`~django.template.response.SimpleTemplateResponse` that uses
+    a :class:`~django.template.RequestContext` instead of
+    a :class:`~django.template.Context`.
 
 Methods
 -------
@@ -162,7 +162,9 @@
     ``context``
         A dictionary of values to add to the template context. By default,
         this is an empty dictionary. :class:`~django.template.Context` objects
-        are also accepted as ``context`` values.
+        are also accepted as ``context`` values. If you pass a
+        :class:`~django.template.Context` instance or subclass, it will be used
+        instead of creating a new :class:`~django.template.RequestContext`.
 
     ``status``
         The HTTP Status code for the response.
diff --git a/lib/django-1.5/docs/ref/templates/api.txt b/lib/django-1.5/docs/ref/templates/api.txt
index f77455d..dd99aad 100644
--- a/lib/django-1.5/docs/ref/templates/api.txt
+++ b/lib/django-1.5/docs/ref/templates/api.txt
@@ -310,10 +310,12 @@
     >>> c = Context()
     >>> c['foo'] = 'first level'
     >>> c.push()
+    {}
     >>> c['foo'] = 'second level'
     >>> c['foo']
     'second level'
     >>> c.pop()
+    {'foo': 'second level'}
     >>> c['foo']
     'first level'
     >>> c['foo'] = 'overwritten'
diff --git a/lib/django-1.5/docs/ref/templates/builtins.txt b/lib/django-1.5/docs/ref/templates/builtins.txt
index 1572b15..02177eb 100644
--- a/lib/django-1.5/docs/ref/templates/builtins.txt
+++ b/lib/django-1.5/docs/ref/templates/builtins.txt
@@ -1201,6 +1201,10 @@
 Uses a similar format as PHP's ``date()`` function (http://php.net/date)
 with some differences.
 
+.. note::
+    These format characters are not used in Django outside of templates. They
+    were designed to be compatible with PHP to ease transitioning for designers.
+
 Available format strings:
 
 ================  ========================================  =====================
@@ -1940,7 +1944,7 @@
 striptags
 ^^^^^^^^^
 
-Strips all [X]HTML tags.
+Makes all possible efforts to strip all [X]HTML tags.
 
 For example::
 
@@ -1949,6 +1953,16 @@
 If ``value`` is ``"<b>Joel</b> <button>is</button> a <span>slug</span>"``, the
 output will be ``"Joel is a slug"``.
 
+.. admonition:: No safety guarantee
+
+    Note that ``striptags`` doesn't give any guarantee about its output being
+    entirely HTML safe, particularly with invalid HTML input. So **NEVER**
+    apply the ``safe`` filter to a ``striptags`` output.
+    If you are looking for something more robust, you can use the ``bleach``
+    Python library, notably its `clean`_ method.
+
+.. _clean: http://bleach.readthedocs.org/en/latest/clean.html
+
 .. templatefilter:: time
 
 time
@@ -1958,7 +1972,7 @@
 
 Given format can be the predefined one :setting:`TIME_FORMAT`, or a custom
 format, same as the :tfilter:`date` filter. Note that the predefined format
-is locale-dependant.
+is locale-dependent.
 
 The time filter will only accept parameters in the format string that relate
 to the time of day, not the date (for obvious reasons). If you need to
diff --git a/lib/django-1.5/docs/ref/unicode.txt b/lib/django-1.5/docs/ref/unicode.txt
index 3743d0a..2651073 100644
--- a/lib/django-1.5/docs/ref/unicode.txt
+++ b/lib/django-1.5/docs/ref/unicode.txt
@@ -17,7 +17,7 @@
 a more restrictive encoding -- for example, latin1 (iso8859-1) -- you won't be
 able to store certain characters in the database, and information will be lost.
 
-* MySQL users, refer to the `MySQL manual`_ (section 9.1.3.2 for MySQL 5.1)
+* MySQL users, refer to the `MySQL manual`_ (section 10.1.3.2 for MySQL 5.1)
   for details on how to set or alter the database character set encoding.
 
 * PostgreSQL users, refer to the `PostgreSQL manual`_ (section 21.2.2 in
diff --git a/lib/django-1.5/docs/ref/utils.txt b/lib/django-1.5/docs/ref/utils.txt
index 5d234e7..d489545 100644
--- a/lib/django-1.5/docs/ref/utils.txt
+++ b/lib/django-1.5/docs/ref/utils.txt
@@ -616,15 +616,23 @@
 
 .. function:: strip_tags(value)
 
-    Removes anything that looks like an html tag from the string, that is
-    anything contained within ``<>``.
+    Tries to remove anything that looks like an HTML tag from the string, that
+    is anything contained within ``<>``.
+    Absolutely NO guarantee is provided about the resulting string being entirely
+    HTML safe. So NEVER mark safe the result of a ``strip_tags`` call without
+    escaping it first, for example with :func:`~django.utils.html.escape`.
 
     For example::
 
         strip_tags(value)
 
-    If ``value`` is ``"<b>Joel</b> <button>is</button> a <span>slug</span>"`` the
-    return value will be ``"Joel is a slug"``.
+    If ``value`` is ``"<b>Joel</b> <button>is</button> a <span>slug</span>"``
+    the return value will be ``"Joel is a slug"``.
+
+    If you are looking for a more robust solution, take a look at the `bleach`_
+    Python library.
+
+    .. _bleach: https://pypi.python.org/pypi/bleach
 
 .. function:: remove_tags(value, tags)
 
@@ -746,6 +754,17 @@
 
     Can be called multiple times on a single string.
 
+    String marked safe will become unsafe again if modified. For example::
+
+        >>> mystr = '<b>Hello World</b>   '
+        >>> mystr = mark_safe(mystr)
+        >>> type(mystr)
+        <class 'django.utils.safestring.SafeBytes'>
+
+        >>> mystr = mystr.strip()  # removing whitespace
+        >>> type(mystr)
+        <type 'str'>
+
 .. function:: mark_for_escaping(s)
 
     Explicitly mark a string as requiring HTML escaping upon output. Has no
@@ -829,7 +848,7 @@
 
 .. function:: ngettext_lazy(singular, plural, number)
 .. function:: ungettext_lazy(singular, plural, number)
-.. function:: npgettext_lazy(singular, plural, number)
+.. function:: npgettext_lazy(context, singular, plural, number)
 
     Same as the non-lazy versions above, but using lazy execution.
 
diff --git a/lib/django-1.5/docs/releases/1.4-alpha-1.txt b/lib/django-1.5/docs/releases/1.4-alpha-1.txt
index 36b70ba..94e875c 100644
--- a/lib/django-1.5/docs/releases/1.4-alpha-1.txt
+++ b/lib/django-1.5/docs/releases/1.4-alpha-1.txt
@@ -337,9 +337,10 @@
 Error report filtering
 ~~~~~~~~~~~~~~~~~~~~~~
 
-Two new function decorators, :func:`sensitive_variables` and
-:func:`sensitive_post_parameters`, were added to allow designating the
-local variables and POST parameters which may contain sensitive
+We added two function decorators,
+:func:`~django.views.decorators.debug.sensitive_variables` and
+:func:`~django.views.decorators.debug.sensitive_post_parameters`, to allow
+designating the local variables and POST parameters that may contain sensitive
 information and should be filtered out of error reports.
 
 All POST parameters are now systematically filtered out of error reports for
diff --git a/lib/django-1.5/docs/releases/1.4-beta-1.txt b/lib/django-1.5/docs/releases/1.4-beta-1.txt
index dd5d9ab..18d9186 100644
--- a/lib/django-1.5/docs/releases/1.4-beta-1.txt
+++ b/lib/django-1.5/docs/releases/1.4-beta-1.txt
@@ -375,9 +375,10 @@
 Error report filtering
 ~~~~~~~~~~~~~~~~~~~~~~
 
-Two new function decorators, :func:`sensitive_variables` and
-:func:`sensitive_post_parameters`, were added to allow designating the
-local variables and POST parameters which may contain sensitive
+We added two function decorators,
+:func:`~django.views.decorators.debug.sensitive_variables` and
+:func:`~django.views.decorators.debug.sensitive_post_parameters`, to allow
+designating the local variables and POST parameters that may contain sensitive
 information and should be filtered out of error reports.
 
 All POST parameters are now systematically filtered out of error reports for
diff --git a/lib/django-1.5/docs/releases/1.4.10.txt b/lib/django-1.5/docs/releases/1.4.10.txt
new file mode 100644
index 0000000..7477ee5
--- /dev/null
+++ b/lib/django-1.5/docs/releases/1.4.10.txt
@@ -0,0 +1,14 @@
+===========================
+Django 1.4.10 release notes
+===========================
+
+*November 6, 2013*
+
+Django 1.4.10 fixes a Python-compatibility bug in the 1.4 series.
+
+Python compatibility
+--------------------
+
+Django 1.4.9 inadvertently introduced issues with Python 2.5 compatibility.
+Django 1.4.10 restores Python 2.5 compatibility. This was issue #21362 in
+Django's Trac.
diff --git a/lib/django-1.5/docs/releases/1.4.11.txt b/lib/django-1.5/docs/releases/1.4.11.txt
new file mode 100644
index 0000000..2419454
--- /dev/null
+++ b/lib/django-1.5/docs/releases/1.4.11.txt
@@ -0,0 +1,110 @@
+===========================
+Django 1.4.11 release notes
+===========================
+
+*April 21, 2014*
+
+Django 1.4.11 fixes three security issues in 1.4.10. Additionally,
+Django's vendored version of six, :mod:`django.utils.six`, has been
+upgraded to the latest release (1.6.1).
+
+Unexpected code execution using ``reverse()``
+=============================================
+
+Django's URL handling is based on a mapping of regex patterns
+(representing the URLs) to callable views, and Django's own processing
+consists of matching a requested URL against those patterns to
+determine the appropriate view to invoke.
+
+Django also provides a convenience function --
+:func:`~django.core.urlresolvers.reverse` -- which performs this process
+in the opposite direction. The ``reverse()`` function takes
+information about a view and returns a URL which would invoke that
+view. Use of ``reverse()`` is encouraged for application developers,
+as the output of ``reverse()`` is always based on the current URL
+patterns, meaning developers do not need to change other code when
+making changes to URLs.
+
+One argument signature for ``reverse()`` is to pass a dotted Python
+path to the desired view. In this situation, Django will import the
+module indicated by that dotted path as part of generating the
+resulting URL. If such a module has import-time side effects, those
+side effects will occur.
+
+Thus it is possible for an attacker to cause unexpected code
+execution, given the following conditions:
+
+1. One or more views are present which construct a URL based on user
+   input (commonly, a "next" parameter in a querystring indicating
+   where to redirect upon successful completion of an action).
+
+2. One or more modules are known to an attacker to exist on the
+   server's Python import path, which perform code execution with side
+   effects on importing.
+
+To remedy this, ``reverse()`` will now only accept and import dotted
+paths based on the view-containing modules listed in the project's :doc:`URL
+pattern configuration </topics/http/urls>`, so as to ensure that only modules
+the developer intended to be imported in this fashion can or will be imported.
+
+Caching of anonymous pages could reveal CSRF token
+==================================================
+
+Django includes both a :doc:`caching framework </topics/cache>` and a system
+for :doc:`preventing cross-site request forgery (CSRF) attacks
+</ref/contrib/csrf/>`. The CSRF-protection system is based on a random nonce
+sent to the client in a cookie which must be sent by the client on future
+requests and, in forms, a hidden value which must be submitted back with the
+form.
+
+The caching framework includes an option to cache responses to
+anonymous (i.e., unauthenticated) clients.
+
+When the first anonymous request to a given page is by a client which
+did not have a CSRF cookie, the cache framework will also cache the
+CSRF cookie and serve the same nonce to other anonymous clients who
+do not have a CSRF cookie. This can allow an attacker to obtain a
+valid CSRF cookie value and perform attacks which bypass the check for
+the cookie.
+
+To remedy this, the caching framework will no longer cache such
+responses. The heuristic for this will be:
+
+1. If the incoming request did not submit any cookies, and
+
+2. If the response did send one or more cookies, and
+
+3. If the ``Vary: Cookie`` header is set on the response, then the
+   response will not be cached.
+
+MySQL typecasting
+=================
+
+The MySQL database is known to "typecast" on certain queries; for
+example, when querying a table which contains string values, but using
+a query which filters based on an integer value, MySQL will first
+silently coerce the strings to integers and return a result based on that.
+
+If a query is performed without first converting values to the
+appropriate type, this can produce unexpected results, similar to what
+would occur if the query itself had been manipulated.
+
+Django's model field classes are aware of their own types and most
+such classes perform explicit conversion of query arguments to the
+correct database-level type before querying. However, three model
+field classes did not correctly convert their arguments:
+
+* :class:`~django.db.models.FilePathField`
+* :class:`~django.db.models.GenericIPAddressField`
+* :class:`~django.db.models.IPAddressField`
+
+These three fields have been updated to convert their arguments to the
+correct types before querying.
+
+Additionally, developers of custom model fields are now warned via
+documentation to ensure their custom field classes will perform
+appropriate type conversions, and users of the :meth:`raw()
+<django.db.models.query.QuerySet.raw>` and :meth:`extra()
+<django.db.models.query.QuerySet.extra>` query methods -- which allow the
+developer to supply raw SQL or SQL fragments -- will be advised to ensure they
+perform appropriate manual type conversions prior to executing queries.
diff --git a/lib/django-1.5/docs/releases/1.4.12.txt b/lib/django-1.5/docs/releases/1.4.12.txt
new file mode 100644
index 0000000..41752a7
--- /dev/null
+++ b/lib/django-1.5/docs/releases/1.4.12.txt
@@ -0,0 +1,14 @@
+===========================
+Django 1.4.12 release notes
+===========================
+
+*April 28, 2014*
+
+Django 1.4.12 fixes a regression in the 1.4.11 security release.
+
+Bugfixes
+========
+
+* Restored the ability to :meth:`~django.core.urlresolvers.reverse` views
+  created using :func:`functools.partial()`
+  (`#22486 <http://code.djangoproject.com/ticket/22486>`_)
diff --git a/lib/django-1.5/docs/releases/1.4.13.txt b/lib/django-1.5/docs/releases/1.4.13.txt
new file mode 100644
index 0000000..bcbe460
--- /dev/null
+++ b/lib/django-1.5/docs/releases/1.4.13.txt
@@ -0,0 +1,47 @@
+===========================
+Django 1.4.13 release notes
+===========================
+
+*May 13, 2014*
+
+Django 1.4.13 fixes two security issues in 1.4.12.
+
+
+Caches may incorrectly be allowed to store and serve private data
+=================================================================
+In certain situations, Django may allow caches to store private data
+related to a particular session and then serve that data to requests
+with a different session, or no session at all. This can both lead to
+information disclosure, and can be a vector for cache poisoning.
+
+When using Django sessions, Django will set a ``Vary: Cookie`` header to
+ensure caches do not serve cached data to requests from other sessions.
+However, older versions of Internet Explorer (most likely only Internet
+Explorer 6, and Internet Explorer 7 if run on Windows XP or Windows Server
+2003) are unable to handle the ``Vary`` header in combination with many content
+types. Therefore, Django would remove the header if the request was made by
+Internet Explorer.
+
+To remedy this, the special behaviour for these older Internet Explorer versions
+has been removed, and the ``Vary`` header is no longer stripped from the response.
+In addition, modifications to the ``Cache-Control`` header for all Internet Explorer
+requests with a ``Content-Disposition`` header, have also been removed as they
+were found to have similar issues.
+
+
+Malformed redirect URLs from user input not correctly validated
+===============================================================
+The validation for redirects did not correctly validate some malformed URLs,
+which are accepted by some browsers. This allows a user to be redirected to
+an unsafe URL unexpectedly.
+
+Django relies on user input in some cases (e.g.
+:func:`django.contrib.auth.views.login`, ``django.contrib.comments``, and
+:doc:`i18n </topics/i18n/index>`) to redirect the user to an "on success" URL.
+The security checks for these redirects (namely
+``django.util.http.is_safe_url()``) did not correctly validate some malformed
+URLs, such as `http:\\\\\\djangoproject.com`, which are accepted by some browsers
+with more liberal URL parsing.
+
+To remedy this, the validation in ``is_safe_url()`` has been tightened to be able
+to handle and correctly validate these malformed URLs.
diff --git a/lib/django-1.5/docs/releases/1.4.8.txt b/lib/django-1.5/docs/releases/1.4.8.txt
index bec5a4b..08dca40 100644
--- a/lib/django-1.5/docs/releases/1.4.8.txt
+++ b/lib/django-1.5/docs/releases/1.4.8.txt
@@ -1,21 +1,32 @@
 ==========================
-Django 1.4.7 release notes
+Django 1.4.8 release notes
 ==========================
 
 *September 14, 2013*
 
-Django 1.4.8 fixes one security issue present in previous Django releases in
+Django 1.4.8 fixes two security issues present in previous Django releases in
 the 1.4 series.
 
 Denial-of-service via password hashers
 --------------------------------------
 
-In previous versions of Django no limit was imposed on the plaintext
-length of a password. This allows a denial-of-service attack through
+In previous versions of Django, no limit was imposed on the plaintext
+length of a password. This allowed a denial-of-service attack through
 submission of bogus but extremely large passwords, tying up server
 resources performing the (expensive, and increasingly expensive with
 the length of the password) calculation of the corresponding hash.
 
 As of 1.4.8, Django's authentication framework imposes a 4096-byte
-limit on passwords, and will fail authentication with any submitted
+limit on passwords and will fail authentication with any submitted
 password of greater length.
+
+Corrected usage of :func:`~django.views.decorators.debug.sensitive_post_parameters` in :mod:`django.contrib.auth`’s admin
+-------------------------------------------------------------------------------------------------------------------------
+
+The decoration of the ``add_view`` and ``user_change_password`` user admin
+views with :func:`~django.views.decorators.debug.sensitive_post_parameters`
+did not include :func:`~django.utils.decorators.method_decorator` (required
+since the views are methods) resulting in the decorator not being properly
+applied. This usage has been fixed and
+:func:`~django.views.decorators.debug.sensitive_post_parameters` will now
+throw an exception if it's improperly used.
diff --git a/lib/django-1.5/docs/releases/1.4.9.txt b/lib/django-1.5/docs/releases/1.4.9.txt
new file mode 100644
index 0000000..d7d79e7
--- /dev/null
+++ b/lib/django-1.5/docs/releases/1.4.9.txt
@@ -0,0 +1,21 @@
+==========================
+Django 1.4.9 release notes
+==========================
+
+*October 24, 2013*
+
+Django 1.4.9 fixes a security-related bug in the 1.4 series and one other
+data corruption bug.
+
+Readdressed denial-of-service via password hashers
+--------------------------------------------------
+
+Django 1.4.8 imposes a 4096-byte limit on passwords in order to mitigate a
+denial-of-service attack through submission of bogus but extremely large
+passwords. In Django 1.4.9, we've reverted this change and instead improved
+the speed of our PBKDF2 algorithm by not rehashing the key on every iteration.
+
+Bugfixes
+========
+
+* Fixed a data corruption bug with ``datetime_safe.datetime.combine`` (#21256).
diff --git a/lib/django-1.5/docs/releases/1.4.txt b/lib/django-1.5/docs/releases/1.4.txt
index d109428..4ff1ce6 100644
--- a/lib/django-1.5/docs/releases/1.4.txt
+++ b/lib/django-1.5/docs/releases/1.4.txt
@@ -507,10 +507,11 @@
 Error report filtering
 ~~~~~~~~~~~~~~~~~~~~~~
 
-We added two function decorators, :func:`sensitive_variables` and
-:func:`sensitive_post_parameters`, to allow designating the local variables
-and POST parameters that may contain sensitive information and should be
-filtered out of error reports.
+We added two function decorators,
+:func:`~django.views.decorators.debug.sensitive_variables` and
+:func:`~django.views.decorators.debug.sensitive_post_parameters`, to allow
+designating the local variables and POST parameters that may contain sensitive
+information and should be filtered out of error reports.
 
 All POST parameters are now systematically filtered out of error reports for
 certain views (``login``, ``password_reset_confirm``, ``password_change`` and
diff --git a/lib/django-1.5/docs/releases/1.5.4.txt b/lib/django-1.5/docs/releases/1.5.4.txt
index 00c56bc..68deeb5 100644
--- a/lib/django-1.5/docs/releases/1.5.4.txt
+++ b/lib/django-1.5/docs/releases/1.5.4.txt
@@ -1,21 +1,40 @@
 ==========================
-Django 1.5.3 release notes
+Django 1.5.4 release notes
 ==========================
 
 *September 14, 2013*
 
 This is Django 1.5.4, the fourth release in the Django 1.5 series. It addresses
-one security issue.
+two security issues and one bug.
 
 Denial-of-service via password hashers
 --------------------------------------
 
-In previous versions of Django no limit was imposed on the plaintext
-length of a password. This allows a denial-of-service attack through
+In previous versions of Django, no limit was imposed on the plaintext
+length of a password. This allowed a denial-of-service attack through
 submission of bogus but extremely large passwords, tying up server
 resources performing the (expensive, and increasingly expensive with
 the length of the password) calculation of the corresponding hash.
 
-As of 1.5.3, Django's authentication framework imposes a 4096-byte
+As of 1.5.4, Django's authentication framework imposes a 4096-byte
 limit on passwords, and will fail authentication with any submitted
 password of greater length.
+
+Corrected usage of :func:`~django.views.decorators.debug.sensitive_post_parameters` in :mod:`django.contrib.auth`’s admin
+-------------------------------------------------------------------------------------------------------------------------
+
+The decoration of the ``add_view`` and ``user_change_password`` user admin
+views with :func:`~django.views.decorators.debug.sensitive_post_parameters`
+did not include :func:`~django.utils.decorators.method_decorator` (required
+since the views are methods) resulting in the decorator not being properly
+applied. This usage has been fixed and
+:func:`~django.views.decorators.debug.sensitive_post_parameters` will now
+throw an exception if it's improperly used.
+
+Bugfixes
+========
+
+* Fixed a bug that prevented a ``QuerySet`` that uses
+  :meth:`~django.db.models.query.QuerySet.prefetch_related` from being pickled
+  and unpickled more than once (the second pickling attempt raised an
+  exception) (#21102).
diff --git a/lib/django-1.5/docs/releases/1.5.5.txt b/lib/django-1.5/docs/releases/1.5.5.txt
new file mode 100644
index 0000000..052caa8
--- /dev/null
+++ b/lib/django-1.5/docs/releases/1.5.5.txt
@@ -0,0 +1,39 @@
+==========================
+Django 1.5.5 release notes
+==========================
+
+*October 24, 2013*
+
+Django 1.5.5 fixes a couple security-related bugs and several other bugs in the
+1.5 series.
+
+Readdressed denial-of-service via password hashers
+--------------------------------------------------
+
+Django 1.5.4 imposes a 4096-byte limit on passwords in order to mitigate a
+denial-of-service attack through submission of bogus but extremely large
+passwords. In Django 1.5.5, we've reverted this change and instead improved
+the speed of our PBKDF2 algorithm by not rehashing the key on every iteration.
+
+Properly rotate CSRF token on login
+-----------------------------------
+
+This behavior introduced as a security hardening measure in Django 1.5.2 did
+not work properly and is now fixed.
+
+Bugfixes
+========
+
+* Fixed a data corruption bug with ``datetime_safe.datetime.combine`` (#21256).
+* Fixed a Python 3 incompatibility in ``django.utils.text.unescape_entities()``
+  (#21185).
+* Fixed a couple data corruption issues with ``QuerySet`` edge cases under
+  Oracle and MySQL (#21203, #21126).
+* Fixed crashes when using combinations of ``annotate()``,
+  ``select_related()``, and ``only()`` (#16436).
+
+Backwards incompatible changes
+==============================
+
+* The undocumented ``django.core.servers.basehttp.WSGIServerException`` has
+  been removed. Use ``socket.error`` provided by the standard library instead.
diff --git a/lib/django-1.5/docs/releases/1.5.6.txt b/lib/django-1.5/docs/releases/1.5.6.txt
new file mode 100644
index 0000000..1410d14
--- /dev/null
+++ b/lib/django-1.5/docs/releases/1.5.6.txt
@@ -0,0 +1,119 @@
+==========================
+Django 1.5.6 release notes
+==========================
+
+*April 21, 2014*
+
+Django 1.5.6 fixes several bugs in 1.5.5, including three security
+issues.
+
+Unexpected code execution using ``reverse()``
+=============================================
+
+Django's URL handling is based on a mapping of regex patterns
+(representing the URLs) to callable views, and Django's own processing
+consists of matching a requested URL against those patterns to
+determine the appropriate view to invoke.
+
+Django also provides a convenience function --
+:func:`~django.core.urlresolvers.reverse` -- which performs this process
+in the opposite direction. The ``reverse()`` function takes
+information about a view and returns a URL which would invoke that
+view. Use of ``reverse()`` is encouraged for application developers,
+as the output of ``reverse()`` is always based on the current URL
+patterns, meaning developers do not need to change other code when
+making changes to URLs.
+
+One argument signature for ``reverse()`` is to pass a dotted Python
+path to the desired view. In this situation, Django will import the
+module indicated by that dotted path as part of generating the
+resulting URL. If such a module has import-time side effects, those
+side effects will occur.
+
+Thus it is possible for an attacker to cause unexpected code
+execution, given the following conditions:
+
+1. One or more views are present which construct a URL based on user
+   input (commonly, a "next" parameter in a querystring indicating
+   where to redirect upon successful completion of an action).
+
+2. One or more modules are known to an attacker to exist on the
+   server's Python import path, which perform code execution with side
+   effects on importing.
+
+To remedy this, ``reverse()`` will now only accept and import dotted
+paths based on the view-containing modules listed in the project's :doc:`URL
+pattern configuration </topics/http/urls>`, so as to ensure that only modules
+the developer intended to be imported in this fashion can or will be imported.
+
+Caching of anonymous pages could reveal CSRF token
+==================================================
+
+Django includes both a :doc:`caching framework </topics/cache>` and a system
+for :doc:`preventing cross-site request forgery (CSRF) attacks
+</ref/contrib/csrf/>`. The CSRF-protection system is based on a random nonce
+sent to the client in a cookie which must be sent by the client on future
+requests and, in forms, a hidden value which must be submitted back with the
+form.
+
+The caching framework includes an option to cache responses to
+anonymous (i.e., unauthenticated) clients.
+
+When the first anonymous request to a given page is by a client which
+did not have a CSRF cookie, the cache framework will also cache the
+CSRF cookie and serve the same nonce to other anonymous clients who
+do not have a CSRF cookie. This can allow an attacker to obtain a
+valid CSRF cookie value and perform attacks which bypass the check for
+the cookie.
+
+To remedy this, the caching framework will no longer cache such
+responses. The heuristic for this will be:
+
+1. If the incoming request did not submit any cookies, and
+
+2. If the response did send one or more cookies, and
+
+3. If the ``Vary: Cookie`` header is set on the response, then the
+   response will not be cached.
+
+MySQL typecasting
+=================
+
+The MySQL database is known to "typecast" on certain queries; for
+example, when querying a table which contains string values, but using
+a query which filters based on an integer value, MySQL will first
+silently coerce the strings to integers and return a result based on that.
+
+If a query is performed without first converting values to the
+appropriate type, this can produce unexpected results, similar to what
+would occur if the query itself had been manipulated.
+
+Django's model field classes are aware of their own types and most
+such classes perform explicit conversion of query arguments to the
+correct database-level type before querying. However, three model
+field classes did not correctly convert their arguments:
+
+* :class:`~django.db.models.FilePathField`
+* :class:`~django.db.models.GenericIPAddressField`
+* :class:`~django.db.models.IPAddressField`
+
+These three fields have been updated to convert their arguments to the
+correct types before querying.
+
+Additionally, developers of custom model fields are now warned via
+documentation to ensure their custom field classes will perform
+appropriate type conversions, and users of the :meth:`raw()
+<django.db.models.query.QuerySet.raw>` and :meth:`extra()
+<django.db.models.query.QuerySet.extra>` query methods -- which allow the
+developer to supply raw SQL or SQL fragments -- will be advised to ensure they
+perform appropriate manual type conversions prior to executing queries.
+
+Bugfixes
+========
+
+* Fixed :class:`~django.contrib.auth.backends.ModelBackend` raising
+  ``UnboundLocalError`` if :func:`~django.contrib.auth.get_user_model`
+  raised an error (#21439).
+
+Additionally, Django's vendored version of six, :mod:`django.utils.six`,
+has been upgraded to the latest release (1.6.1).
diff --git a/lib/django-1.5/docs/releases/1.5.7.txt b/lib/django-1.5/docs/releases/1.5.7.txt
new file mode 100644
index 0000000..452e468
--- /dev/null
+++ b/lib/django-1.5/docs/releases/1.5.7.txt
@@ -0,0 +1,14 @@
+==========================
+Django 1.5.7 release notes
+==========================
+
+*April 28, 2014*
+
+Django 1.5.7 fixes a regression in the 1.5.6 security release.
+
+Bugfixes
+========
+
+* Restored the ability to :meth:`~django.core.urlresolvers.reverse` views
+  created using :func:`functools.partial()`
+  (`#22486 <http://code.djangoproject.com/ticket/22486>`_)
diff --git a/lib/django-1.5/docs/releases/1.5.8.txt b/lib/django-1.5/docs/releases/1.5.8.txt
new file mode 100644
index 0000000..0fe3c95
--- /dev/null
+++ b/lib/django-1.5/docs/releases/1.5.8.txt
@@ -0,0 +1,47 @@
+==========================
+Django 1.5.8 release notes
+==========================
+
+*May 13, 2014*
+
+Django 1.5.8 fixes two security issues in 1.5.7.
+
+
+Caches may incorrectly be allowed to store and serve private data
+=================================================================
+In certain situations, Django may allow caches to store private data
+related to a particular session and then serve that data to requests
+with a different session, or no session at all. This can both lead to
+information disclosure, and can be a vector for cache poisoning.
+
+When using Django sessions, Django will set a ``Vary: Cookie`` header to
+ensure caches do not serve cached data to requests from other sessions.
+However, older versions of Internet Explorer (most likely only Internet
+Explorer 6, and Internet Explorer 7 if run on Windows XP or Windows Server
+2003) are unable to handle the ``Vary`` header in combination with many content
+types. Therefore, Django would remove the header if the request was made by
+Internet Explorer.
+
+To remedy this, the special behaviour for these older Internet Explorer versions
+has been removed, and the ``Vary`` header is no longer stripped from the response.
+In addition, modifications to the ``Cache-Control`` header for all Internet Explorer
+requests with a ``Content-Disposition`` header, have also been removed as they
+were found to have similar issues.
+
+
+Malformed redirect URLs from user input not correctly validated
+===============================================================
+The validation for redirects did not correctly validate some malformed URLs,
+which are accepted by some browsers. This allows a user to be redirected to
+an unsafe URL unexpectedly.
+
+Django relies on user input in some cases (e.g.
+:func:`django.contrib.auth.views.login`, ``django.contrib.comments``, and
+:doc:`i18n </topics/i18n/index>`) to redirect the user to an "on success" URL.
+The security checks for these redirects (namely
+``django.util.http.is_safe_url()``) did not correctly validate some malformed
+URLs, such as ``http:\\\\\\djangoproject.com``, which are accepted by some browsers
+with more liberal URL parsing.
+
+To remedy this, the validation in ``is_safe_url()`` has been tightened to be able
+to handle and correctly validate these malformed URLs.
diff --git a/lib/django-1.5/docs/releases/1.5.txt b/lib/django-1.5/docs/releases/1.5.txt
index b0bdfbc..47e8fb6 100644
--- a/lib/django-1.5/docs/releases/1.5.txt
+++ b/lib/django-1.5/docs/releases/1.5.txt
@@ -709,6 +709,10 @@
   escapes its ``contents`` argument. To avoid the HTML escaping, use
   :func:`django.utils.safestring.mark_safe` on the argument before passing it.
 
+* Accessing reverse one-to-one relations fetched via
+  :meth:`~django.db.models.query.QuerySet.select_related` now raises
+  :exc:`~django.core.exceptions.DoesNotExist` instead of returning ``None``.
+
 Features deprecated in 1.5
 ==========================
 
diff --git a/lib/django-1.5/docs/releases/index.txt b/lib/django-1.5/docs/releases/index.txt
index f3f741a..f6b5023 100644
--- a/lib/django-1.5/docs/releases/index.txt
+++ b/lib/django-1.5/docs/releases/index.txt
@@ -22,6 +22,11 @@
 .. toctree::
    :maxdepth: 1
 
+   1.5.8
+   1.5.7
+   1.5.6
+   1.5.5
+   1.5.4
    1.5.3
    1.5.2
    1.5.1
@@ -32,6 +37,12 @@
 .. toctree::
    :maxdepth: 1
 
+   1.4.13
+   1.4.12
+   1.4.11
+   1.4.10
+   1.4.9
+   1.4.8
    1.4.7
    1.4.6
    1.4.5
@@ -96,6 +107,16 @@
    0.96
    0.95
 
+Security releases
+=================
+
+Whenever a security issue is disclosed via :doc:`Django's security
+policies </internals/security>`, appropriate release notes are now
+added to all affected release series.
+
+Additionally, :doc:`an archive of disclosed security issues
+</releases/security>` is maintained.
+
 Development releases
 ====================
 
@@ -106,6 +127,7 @@
 .. toctree::
    :maxdepth: 1
 
+   security
    1.5-beta-1
    1.5-alpha-1
    1.4-beta-1
diff --git a/lib/django-1.5/docs/releases/security.txt b/lib/django-1.5/docs/releases/security.txt
new file mode 100644
index 0000000..d9c511e
--- /dev/null
+++ b/lib/django-1.5/docs/releases/security.txt
@@ -0,0 +1,501 @@
+.. _security-releases:
+
+==========================
+Archive of security issues
+==========================
+
+Django's development team is strongly committed to responsible
+reporting and disclosure of security-related issues, as outlined in
+:doc:`Django's security policies </internals/security>`.
+
+As part of that commitment, we maintain the following historical list
+of issues which have been fixed and disclosed. For each issue, the
+list below includes the date, a brief description, the `CVE identifier
+<http://en.wikipedia.org/wiki/Common_Vulnerabilities_and_Exposures>`_
+if applicable, a list of affected versions, a link to the full
+disclosure and links to the appropriate patch(es).
+
+Some important caveats apply to this information:
+
+* Lists of affected versions include only those versions of Django
+  which had stable, security-supported releases at the time of
+  disclosure. This means older versions (whose security support had
+  expired) and versions which were in pre-release (alpha/beta/RC)
+  states at the time of disclosure may have been affected, but are not
+  listed.
+
+* The Django project has on occasion issued security advisories,
+  pointing out potential security problems which can arise from
+  improper configuration or from other issues outside of Django
+  itself. Some of these advisories have received CVEs; when that is
+  the case, they are listed here, but as they have no accompanying
+  patches or releases, only the description, disclosure and CVE will
+  be listed.
+
+
+Issues prior to Django's security process
+=========================================
+
+Some security issues were handled before Django had a formalized
+security process in use. For these, new releases may not have been
+issued at the time and CVEs may not have been assigned.
+
+
+August 16, 2006 - CVE-2007-0404
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2007-0404 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2007-0404&cid=3>`_: Filename validation issue in translation framework. `Full description <https://www.djangoproject.com/weblog/2006/aug/16/compilemessages/>`__
+
+Versions affected
+-----------------
+
+* Django 0.90 `(patch) <https://github.com/django/django/commit/518d406e53>`__
+
+* Django 0.91 `(patch) <https://github.com/django/django/commit/518d406e53>`__
+
+* Django 0.95 `(patch) <https://github.com/django/django/commit/a132d411c6>`__ (released January 21 2007)
+
+January 21, 2007 - CVE-2007-0405
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2007-0405 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2007-0405&cid=3>`_: Apparent "caching" of authenticated user. `Full description <https://www.djangoproject.com/weblog/2007/jan/21/0951/>`__
+
+Versions affected
+-----------------
+
+* Django 0.95 `(patch) <https://github.com/django/django/commit/e89f0a6558>`__
+
+Issues under Django's security process
+======================================
+
+All other security issues have been handled under versions of Django's
+security process. These are listed below.
+
+October 26, 2007 - CVE-2007-5712
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2007-5712 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2007-5712&cid=3>`_: Denial-of-service via arbitrarily-large ``Accept-Language`` header. `Full description <https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>`__
+
+Versions affected
+-----------------
+
+* Django 0.91 `(patch) <https://github.com/django/django/commit/8bc36e726c9e8c75c681d3ad232df8e882aaac81>`__
+
+* Django 0.95 `(patch) <https://github.com/django/django/commit/412ed22502e11c50dbfee854627594f0e7e2c234>`__
+
+* Django 0.96 `(patch) <https://github.com/django/django/commit/7dd2dd08a79e388732ce00e2b5514f15bd6d0f6f>`__
+
+
+May 14, 2008 - CVE-2008-2302
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2008-2302 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2008-2302&cid=3>`_: XSS via admin login redirect. `Full description <https://www.djangoproject.com/weblog/2008/may/14/security/>`__
+
+Versions affected
+-----------------
+
+* Django 0.91 `(patch) <https://github.com/django/django/commit/50ce7fb57d>`__
+
+* Django 0.95 `(patch) <https://github.com/django/django/commit/50ce7fb57d>`__
+
+* Django 0.96 `(patch) <https://github.com/django/django/commit/7791e5c050>`__
+
+
+September 2, 2008 - CVE-2008-3909
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2008-3909 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2008-3909&cid=3>`_: CSRF via preservation of POST data during admin login. `Full description <https://www.djangoproject.com/weblog/2008/sep/02/security/>`__
+
+Versions affected
+-----------------
+
+* Django 0.91 `(patch) <https://github.com/django/django/commit/44debfeaa4473bd28872c735dd3d9afde6886752>`__
+
+* Django 0.95 `(patch) <https://github.com/django/django/commit/aee48854a164382c655acb9f18b3c06c3d238e81>`__
+
+* Django 0.96 `(patch) <https://github.com/django/django/commit/7e0972bded362bc4b851c109df2c8a6548481a8e>`__
+
+July 28, 2009 - CVE-2009-2659
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2009-2659 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2009-2659&cid=3>`_: Directory-traversal in development server media handler. `Full description <https://www.djangoproject.com/weblog/2009/jul/28/security/>`__
+
+Versions affected
+-----------------
+
+* Django 0.96 `(patch) <https://github.com/django/django/commit/da85d76fd6>`__
+
+* Django 1.0 `(patch) <https://github.com/django/django/commit/df7f917b7f>`__
+
+October 9, 2009 - CVE-2009-3695
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2009-3695 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2009-3695&cid=3>`_: Denial-of-service via pathological regular expression performance. `Full description <https://www.djangoproject.com/weblog/2009/oct/09/security/>`__
+
+Versions affected
+-----------------
+
+* Django 1.0 `(patch) <https://github.com/django/django/commit/594a28a904>`__
+
+* Django 1.1 `(patch) <https://github.com/django/django/commit/e3e992e18b>`__
+
+September 8, 2010 - CVE-2010-3082
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2010-3082 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2010-3082&cid=3>`_: XSS via trusting unsafe cookie value. `Full description <https://www.djangoproject.com/weblog/2010/sep/08/security-release/>`__
+
+Versions affected
+-----------------
+
+* Django 1.2 `(patch) <https://github.com/django/django/commit/7f84657b6b>`__
+
+
+December 22, 2010 - CVE-2010-4534
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2010-4534 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2010-4534&cid=3>`_: Information leakage in administrative interface. `Full description <https://www.djangoproject.com/weblog/2010/dec/22/security/>`__
+
+Versions affected
+-----------------
+
+* Django 1.1 `(patch) <https://github.com/django/django/commit/17084839fd>`__
+
+* Django 1.2 `(patch) <https://github.com/django/django/commit/85207a245b>`__
+
+December 22, 2010 - CVE-2010-4535
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2010-4535 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2010-4535&cid=2>`_: Denial-of-service in password-reset mechanism. `Full description <https://www.djangoproject.com/weblog/2010/dec/22/security/>`__
+
+Versions affected
+-----------------
+
+* Django 1.1 `(patch) <https://github.com/django/django/commit/7f8dd9cbac>`__
+
+* Django 1.2 `(patch) <https://github.com/django/django/commit/d5d8942a16>`__
+
+
+February 8, 2011 - CVE-2011-0696
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2011-0696 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2011-0696&cid=2>`_: CSRF via forged HTTP headers. `Full description <https://www.djangoproject.com/weblog/2011/feb/08/security/>`__
+
+Versions affected
+-----------------
+
+* Django 1.1 `(patch) <https://github.com/django/django/commit/408c5c873c>`__
+
+* Django 1.2 `(patch) <https://github.com/django/django/commit/818e70344e>`__
+
+
+February 8, 2011 - CVE-2011-0697
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2011-0697 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2011-0697&cid=2>`_: XSS via unsanitized names of uploaded files. `Full description <https://www.djangoproject.com/weblog/2011/feb/08/security/>`__
+
+Versions affected
+-----------------
+
+* Django 1.1 `(patch) <https://github.com/django/django/commit/1966786d2d>`__
+
+* Django 1.2 `(patch) <https://github.com/django/django/commit/1f814a9547>`__
+
+February 8, 2011 - CVE-2011-0698
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2011-0698 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2011-0698&cid=2>`_: Directory-traversal on Windows via incorrect path-separator handling. `Full description <https://www.djangoproject.com/weblog/2011/feb/08/security/>`__
+
+Versions affected
+-----------------
+
+* Django 1.1 `(patch) <https://github.com/django/django/commit/570a32a047>`__
+
+* Django 1.2 `(patch) <https://github.com/django/django/commit/194566480b>`__
+
+
+September 9, 2011 - CVE-2011-4136
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2011-4136 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2011-4136&cid=2>`_: Session manipulation when using memory-cache-backed session. `Full description <https://www.djangoproject.com/weblog/2011/sep/09/security-releases-issued/>`__
+
+Versions affected
+-----------------
+
+* Django 1.2 `(patch) <https://github.com/django/django/commit/ac7c3a110f>`__
+
+* Django 1.3 `(patch) <https://github.com/django/django/commit/fbe2eead2f>`__
+
+September 9, 2011 - CVE-2011-4137
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2011-4137 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2011-4137&cid=2>`_: Denial-of-service via ``URLField.verify_exists``. `Full description <https://www.djangoproject.com/weblog/2011/sep/09/security-releases-issued/>`__
+
+Versions affected
+-----------------
+
+* Django 1.2 `(patch) <https://github.com/django/django/commit/7268f8af86>`__
+
+* Django 1.3 `(patch) <https://github.com/django/django/commit/1a76dbefdf>`__
+
+September 9, 2011 - CVE-2011-4138
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2011-4138 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2011-4138&cid=2>`_: Information leakage/arbitrary request issuance via ``URLField.verify_exists``. `Full description <https://www.djangoproject.com/weblog/2011/sep/09/security-releases-issued/>`__
+
+Versions affected
+-----------------
+
+* Django 1.2: `(patch) <https://github.com/django/django/commit/7268f8af86>`__
+
+* Django 1.3: `(patch) <https://github.com/django/django/commit/1a76dbefdf>`__
+
+September 9, 2011 - CVE-2011-4139
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2011-4139 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2011-4139&cid=2>`_: ``Host`` header cache poisoning. `Full description <https://www.djangoproject.com/weblog/2011/sep/09/security-releases-issued/>`__
+
+Versions affected
+-----------------
+
+* Django 1.2 `(patch) <https://github.com/django/django/commit/c613af4d64>`__
+
+* Django 1.3 `(patch) <https://github.com/django/django/commit/2f7fadc38e>`__
+
+September 9, 2011 - CVE-2011-4140
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2011-4140 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2011-4140&cid=2>`_: Potential CSRF via ``Host`` header.  `Full description <https://www.djangoproject.com/weblog/2011/sep/09/security-releases-issued/>`__
+
+Versions affected
+-----------------
+
+This notification was an advisory only, so no patches were issued.
+
+* Django 1.2
+
+* Django 1.3
+
+
+July 30, 2012 - CVE-2012-3442
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2012-3442 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2012-3442&cid=2>`_: XSS via failure to validate redirect scheme. `Full description <https://www.djangoproject.com/weblog/2012/jul/30/security-releases-issued/>`__
+
+Versions affected
+-----------------
+
+* Django 1.3: `(patch) <https://github.com/django/django/commit/4dea4883e6c50d75f215a6b9bcbd95273f57c72d>`__
+
+* Django 1.4: `(patch) <https://github.com/django/django/commit/e34685034b60be1112160e76091e5aee60149fa1>`__
+
+
+July 30, 2012 - CVE-2012-3443
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2012-3443 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2012-3443&cid=2>`_: Denial-of-service via compressed image files. `Full description <https://www.djangoproject.com/weblog/2012/jul/30/security-releases-issued/>`__
+
+Versions affected
+-----------------
+
+* Django 1.3: `(patch) <https://github.com/django/django/commit/b2eb4787a0fff9c9993b78be5c698e85108f3446>`__
+
+* Django 1.4: `(patch) <https://github.com/django/django/commit/c14f325c4eef628bc7bfd8873c3a72aeb0219141>`__
+
+
+July 30, 2012 - CVE-2012-3444
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2012-3444 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2012-3444&cid=2>`_: Denial-of-service via large image files. `Full description <https://www.djangoproject.com/weblog/2012/jul/30/security-releases-issued/>`__
+
+Versions affected
+-----------------
+
+* Django 1.3 `(patch) <https://github.com/django/django/commit/9ca0ff6268eeff92d0d0ac2c315d4b6a8e229155>`__
+
+* Django 1.4 `(patch) <https://github.com/django/django/commit/da33d67181b53fe6cc737ac1220153814a1509f6>`__
+
+
+October 17, 2012 - CVE-2012-4520
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2012-4520 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2012-4520&cid=2>`_: ``Host`` header poisoning. `Full description <https://www.djangoproject.com/weblog/2012/oct/17/security/>`__
+
+Versions affected
+-----------------
+
+* Django 1.3 `(patch) <https://github.com/django/django/commit/b45c377f8f488955e0c7069cad3f3dd21910b071>`__
+
+* Django 1.4 `(patch) <https://github.com/django/django/commit/92d3430f12171f16f566c9050c40feefb830a4a3>`__
+
+
+December 10, 2012 - No CVE 1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Additional hardening of ``Host`` header handling. `Full description <https://www.djangoproject.com/weblog/2012/dec/10/security/>`__
+
+Versions affected
+-----------------
+
+* Django 1.3 `(patch) <https://github.com/django/django/commit/2da4ace0bc1bc1d79bf43b368cb857f6f0cd6b1b>`__
+
+* Django 1.4 `(patch) <https://github.com/django/django/commit/319627c184e71ae267d6b7f000e293168c7b6e09>`__
+
+
+December 10, 2012 - No CVE 2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Additional hardening of redirect validation. `Full description <https://www.djangoproject.com/weblog/2012/dec/10/security/>`__
+
+Versions affected
+-----------------
+
+    * Django 1.3: `(patch) <https://github.com/django/django/commit/1515eb46daa0897ba5ad5f0a2db8969255f1b343>`__
+
+    * Django 1.4: `(patch) <https://github.com/django/django/commit/b2ae0a63aeec741f1e51bac9a95a27fd635f9652>`__
+
+February 19, 2013 - No CVE
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Additional hardening of ``Host`` header handling. `Full description <https://www.djangoproject.com/weblog/2013/feb/19/security/>`__
+
+Versions affected
+-----------------
+
+* Django 1.3 `(patch) <https://github.com/django/django/commit/27cd872e6e36a81d0bb6f5b8765a1705fecfc253>`__
+
+* Django 1.4 `(patch) <https://github.com/django/django/commit/9936fdb11d0bbf0bd242f259bfb97bbf849d16f8>`__
+
+February 19, 2013 - CVE-2013-1664/1665
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2013-1664 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2013-1664&cid=2>`_ and `CVE-2013-1665 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2013-1665&cid=2>`_: Entity-based attacks against Python XML libraries. `Full description <https://www.djangoproject.com/weblog/2013/feb/19/security/>`__
+
+Versions affected
+-----------------
+
+* Django 1.3 `(patch) <https://github.com/django/django/commit/d19a27066b2247102e65412aa66917aff0091112>`__
+
+* Django 1.4 `(patch) <https://github.com/django/django/commit/1c60d07ba23e0350351c278ad28d0bd5aa410b40>`__
+
+February 19, 2013 - CVE-2013-0305
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2013-0305 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2013-0305&cid=2>`_: Information leakage via admin history log.  `Full description <https://www.djangoproject.com/weblog/2013/feb/19/security/>`__
+
+Versions affected
+-----------------
+
+* Django 1.3 `(patch) <https://github.com/django/django/commit/d3a45e10c8ac8268899999129daa27652ec0da35>`__
+
+* Django 1.4 `(patch) <https://github.com/django/django/commit/0e7861aec73702f7933ce2a93056f7983939f0d6>`__
+
+
+February 19, 2013 - CVE-2013-0306
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2013-0306 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2013-0306&cid=2>`_: Denial-of-service via formset ``max_num`` bypass. `Full description <https://www.djangoproject.com/weblog/2013/feb/19/security/>`__
+
+Versions affected
+-----------------
+
+* Django 1.3 `(patch) <https://github.com/django/django/commit/d7094bbce8cb838f3b40f504f198c098ff1cf727>`__
+
+* Django 1.4 `(patch) <https://github.com/django/django/commit/0cc350a896f70ace18280410eb616a9197d862b0>`__
+
+August 13, 2013 - Awaiting CVE 1
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+(CVE not yet issued): XSS via admin trusting ``URLField`` values. `Full description <https://www.djangoproject.com/weblog/2013/aug/13/security-releases-issued/>`__
+
+Versions affected
+-----------------
+
+* Django 1.5 `(patch) <https://github.com/django/django/commit/90363e388c61874add3f3557ee654a996ec75d78>`__
+
+August 13, 2013 - Awaiting CVE 2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+(CVE not yet issued): Possible XSS via unvalidated URL redirect schemes. `Full description <https://www.djangoproject.com/weblog/2013/aug/13/security-releases-issued/>`__
+
+Versions affected
+-----------------
+
+* Django 1.4 `(patch) <https://github.com/django/django/commit/ec67af0bd609c412b76eaa4cc89968a2a8e5ad6a>`__
+
+* Django 1.5 `(patch) <https://github.com/django/django/commit/1a274ccd6bc1afbdac80344c9b6e5810c1162b5f>`__
+
+September 10, 2013 - CVE-2013-4315
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2013-4315 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2013-4315&cid=2>`_ Directory-traversal via ``ssi`` template tag. `Full description <https://www.djangoproject.com/weblog/2013/sep/10/security-releases-issued/>`__
+
+Versions affected
+-----------------
+
+* Django 1.4 `(patch) <https://github.com/django/django/commit/87d2750b39f6f2d54b7047225521a44dcd37e896>`__
+
+* Django 1.5 `(patch) <https://github.com/django/django/commit/988b61c550d798f9a66d17ee0511fb7a9a7f33ca>`__
+
+
+September 14, 2013 - CVE-2013-1443
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+CVE-2013-1443: Denial-of-service via large passwords. `Full description <https://www.djangoproject.com/weblog/2013/sep/15/security/>`__
+
+Versions affected
+-----------------
+
+* Django 1.4 `(patch <https://github.com/django/django/commit/3f3d887a6844ec2db743fee64c9e53e04d39a368>`__ and `Python compatibility fix) <https://github.com/django/django/commit/6903d1690a92aa040adfb0c8eb37cf62e4206714>`__
+
+* Django 1.5 `(patch) <https://github.com/django/django/commit/22b74fa09d7ccbc8c52270d648a0da7f3f0fa2bc>`__
+
+
+April 21, 2014 - CVE-2014-0472
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2014-0472 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2014-0472&cid=2>`_: Unexpected code execution using ``reverse()``. `Full description <https://www.djangoproject.com/weblog/2014/apr/21/security/>`_
+
+Versions affected
+-----------------
+
+* Django 1.4 `(patch <https://github.com/django/django/commit/c1a8c420fe4b27fb2caf5e46d23b5712fc0ac535>`_)
+
+* Django 1.5 `(patch <https://github.com/django/django/commit/2a5bcb69f42b84464b24b5c835dca6467b6aa7f1>`_)
+
+* Django 1.6 `(patch <https://github.com/django/django/commit/4352a50871e239ebcdf64eee6f0b88e714015c1b>`_)
+
+* Django 1.7 `(patch <https://github.com/django/django/commit/546740544d7f69254a67b06a3fc7fa0c43512958>`_)
+
+
+April 21, 2014 - CVE-2014-0473
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2014-0473 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2014-0473&cid=2>`_: Caching of anonymous pages could reveal CSRF token. `Full description <https://www.djangoproject.com/weblog/2014/apr/21/security/>`_
+
+Versions affected
+-----------------
+
+* Django 1.4 `(patch <https://github.com/django/django/commit/1170f285ddd6a94a65f911a27788ba49ca08c0b0>`_)
+
+* Django 1.5 `(patch <https://github.com/django/django/commit/6872f42757d7ef6a97e0b6ec5db4d2615d8a2bd8>`_)
+
+* Django 1.6 `(patch <https://github.com/django/django/commit/d63e20942f3024f24cb8cd85a49461ba8a9b6736>`_)
+
+* Django 1.7 `(patch <https://github.com/django/django/commit/380545bf85cbf17fc698d136815b7691f8d023ca>`_)
+
+
+April 21, 2014 - CVE-2014-0474
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`CVE-2014-0474 <http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2014-0474&cid=2>`_: MySQL typecasting causes unexpected query results. `Full description <https://www.djangoproject.com/weblog/2014/apr/21/security/>`_
+
+Versions affected
+-----------------
+
+* Django 1.4 `(patch <https://github.com/django/django/commit/aa80f498de6d687e613860933ac58433ab71ea4b>`_)
+
+* Django 1.5 `(patch <https://github.com/django/django/commit/985434fb1d6bf2335bf96c6ebf91c3674f1f399f>`_)
+
+* Django 1.6 `(patch <https://github.com/django/django/commit/5f0829a27e85d89ad8c433f5c6a7a7d17c9e9292>`_)
+
+* Django 1.7 `(patch <https://github.com/django/django/commit/34526c2f56b863c2103655a0893ac801667e86ea>`_)
diff --git a/lib/django-1.5/docs/topics/auth/customizing.txt b/lib/django-1.5/docs/topics/auth/customizing.txt
index 73a4d53..296b5fe 100644
--- a/lib/django-1.5/docs/topics/auth/customizing.txt
+++ b/lib/django-1.5/docs/topics/auth/customizing.txt
@@ -47,7 +47,7 @@
 Behind the scenes, Django maintains a list of "authentication backends" that it
 checks for authentication. When somebody calls
 :func:`django.contrib.auth.authenticate()` -- as described in :ref:`How to log
-a user in <how-to-log-a-user-in>` above -- Django tries authenticating across
+a user in <how-to-log-a-user-in>` -- Django tries authenticating across
 all of its authentication backends. If the first authentication method fails,
 Django tries the second one, and so on, until all backends have been attempted.
 
@@ -270,7 +270,7 @@
 
 The only thing this does is create those extra permissions when you run
 :djadmin:`manage.py syncdb <syncdb>`. Your code is in charge of checking the
-value of these permissions when an user is trying to access the functionality
+value of these permissions when a user is trying to access the functionality
 provided by the application (viewing tasks, changing the status of tasks,
 closing tasks.) Continuing the above example, the following checks if a user may
 view tasks::
@@ -524,8 +524,9 @@
 
         .. note::
 
-            ``REQUIRED_FIELDS`` must contain all required fields on your User
-            model, but should *not* contain the ``USERNAME_FIELD``.
+            ``REQUIRED_FIELDS`` must contain all required fields on your
+            ``User`` model, but should *not* contain the ``USERNAME_FIELD`` or
+            ``password`` as these fields will always be prompted for.
 
     .. attribute:: is_active
 
diff --git a/lib/django-1.5/docs/topics/auth/default.txt b/lib/django-1.5/docs/topics/auth/default.txt
index a104a5c..e5d3440 100644
--- a/lib/django-1.5/docs/topics/auth/default.txt
+++ b/lib/django-1.5/docs/topics/auth/default.txt
@@ -12,9 +12,9 @@
 supports extensive :doc:`extension and customization
 </topics/auth/customizing>` of authentication.
 
-Django authentication provides both authentication and authorization, together
+Django authentication provides both authentication and authorization together
 and is generally referred to as the authentication system, as these features
-somewhat coupled.
+are somewhat coupled.
 
 .. _user-objects:
 
@@ -494,7 +494,7 @@
 The permission_required decorator
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. function:: permission_required([login_url=None, raise_exception=False])
+.. function:: permission_required(perm, [login_url=None, raise_exception=False])
 
     It's a relatively common task to check whether a user has a particular
     permission. For that reason, Django provides a shortcut for that case: the
@@ -578,8 +578,7 @@
       log the user in. Defaults to :file:`registration/login.html`.
 
     * ``redirect_field_name``: The name of a ``GET`` field containing the
-      URL to redirect to after login. Overrides ``next`` if the given
-      ``GET`` parameter is passed.
+      URL to redirect to after login. Defaults to ``next``.
 
     * ``authentication_form``: A callable (typically just a form class) to
       use for authentication. Defaults to
@@ -696,8 +695,8 @@
       :file:`registration/logged_out.html` if no argument is supplied.
 
     * ``redirect_field_name``: The name of a ``GET`` field containing the
-      URL to redirect to after log out. Overrides ``next_page`` if the given
-      ``GET`` parameter is passed.
+      URL to redirect to after log out. Defaults to ``next``. Overrides the
+      ``next_page`` URL if the given ``GET`` parameter is passed.
 
     * ``current_app``: A hint indicating which application contains the current
       view. See the :ref:`namespaced URL resolution strategy
diff --git a/lib/django-1.5/docs/topics/auth/index.txt b/lib/django-1.5/docs/topics/auth/index.txt
index 8447d44..a51180f 100644
--- a/lib/django-1.5/docs/topics/auth/index.txt
+++ b/lib/django-1.5/docs/topics/auth/index.txt
@@ -12,7 +12,7 @@
 .. module:: django.contrib.auth
    :synopsis: Django's authentication framework.
 
-Django comes with an user authentication system. It handles user accounts,
+Django comes with a user authentication system. It handles user accounts,
 groups, permissions and cookie-based user sessions. This section of the
 documentation explains how the default implementation works out of the box, as
 well as how to :doc:`extend and customize </topics/auth/customizing>` it to
diff --git a/lib/django-1.5/docs/topics/class-based-views/generic-editing.txt b/lib/django-1.5/docs/topics/class-based-views/generic-editing.txt
index 66ba36f..313f200 100644
--- a/lib/django-1.5/docs/topics/class-based-views/generic-editing.txt
+++ b/lib/django-1.5/docs/topics/class-based-views/generic-editing.txt
@@ -168,7 +168,7 @@
 the foreign key relation to the model::
 
     # models.py
-    from django.contrib.auth import User
+    from django.contrib.auth.models import User
     from django.db import models
 
     class Author(models.Model):
diff --git a/lib/django-1.5/docs/topics/class-based-views/index.txt b/lib/django-1.5/docs/topics/class-based-views/index.txt
index b2fa93e..84e4597 100644
--- a/lib/django-1.5/docs/topics/class-based-views/index.txt
+++ b/lib/django-1.5/docs/topics/class-based-views/index.txt
@@ -69,9 +69,10 @@
         template_name = "about.html"
 
 Then we just need to add this new view into our URLconf.
-`~django.views.generic.base.TemplateView` is a class, not a function, so we
-point the URL to the :meth:`~django.views.generic.base.View.as_view` class
-method instead, which provides a function-like entry to class-based views::
+:class:`~django.views.generic.base.TemplateView` is a class, not a function,
+so we point the URL to the :meth:`~django.views.generic.base.View.as_view`
+class method instead, which provides a function-like entry to class-based
+views::
 
     # urls.py
     from django.conf.urls import patterns
diff --git a/lib/django-1.5/docs/topics/class-based-views/mixins.txt b/lib/django-1.5/docs/topics/class-based-views/mixins.txt
index fcde531..14fbfd9 100644
--- a/lib/django-1.5/docs/topics/class-based-views/mixins.txt
+++ b/lib/django-1.5/docs/topics/class-based-views/mixins.txt
@@ -285,17 +285,18 @@
 for the paginated list of books can hang off the publisher found as the single
 object. In order to do this, we need to have two different querysets:
 
-**``Publisher`` queryset for use in ``get_object``**
-    We'll set the ``model`` attribute on the view and rely on the default
-    implementation of ``get_object()`` to fetch the correct ``Publisher``
-    object.
+``Book`` queryset for use by :class:`~django.views.generic.list.ListView`
+    Since we have access to the ``Publisher`` whose books we want to list, we
+    simply override ``get_queryset()`` and use the ``Publisher``'s
+    :ref:`reverse foreign key manager<backwards-related-objects>`.
 
-**``Book`` queryset for use by ``ListView``**
-    The default implementation of ``get_queryset`` uses the ``model`` attribute
-    to construct the queryset. This conflicts with our use of this attribute
-    for ``get_object`` so we'll override that method and have it return
-    the queryset of ``Book`` objects linked to the ``Publisher`` we're looking
-    at.
+``Publisher`` queryset for use in :meth:`~django.views.generic.detail.SingleObjectMixin.get_object()`
+    We'll rely on the default implementation of ``get_object()`` to fetch the
+    correct ``Publisher`` object.
+    However, we need to explicitly pass a ``queryset`` argument because
+    otherwise the default implementation of ``get_object()`` would call
+    ``get_queryset()`` which we have overridden to return ``Book`` objects
+    instead of ``Publisher`` ones.
 
 .. note::
 
@@ -315,12 +316,11 @@
     from books.models import Publisher
 
     class PublisherDetail(SingleObjectMixin, ListView):
-        model = Publisher  # for SingleObjectMixin.get_object
         paginate_by = 2
         template_name = "books/publisher_detail.html"
 
         def get(self, request, *args, **kwargs):
-            self.object = self.get_object()
+            self.object = self.get_object(queryset=Publisher.objects.all())
             return super(PublisherDetail, self).get(request, *args, **kwargs)
 
         def get_context_data(self, **kwargs):
diff --git a/lib/django-1.5/docs/topics/db/models.txt b/lib/django-1.5/docs/topics/db/models.txt
index 58c77bc..67777d0 100644
--- a/lib/django-1.5/docs/topics/db/models.txt
+++ b/lib/django-1.5/docs/topics/db/models.txt
@@ -4,9 +4,9 @@
 
 .. module:: django.db.models
 
-A model is the single, definitive source of data about your data. It contains
-the essential fields and behaviors of the data you're storing. Generally, each
-model maps to a single database table.
+A model is the single, definitive source of information about your data. It
+contains the essential fields and behaviors of the data you're storing.
+Generally, each model maps to a single database table.
 
 The basics:
 
@@ -48,7 +48,7 @@
 
 * The name of the table, ``myapp_person``, is automatically derived from
   some model metadata but can be overridden. See :ref:`table-names` for more
-  details..
+  details.
 
 * An ``id`` field is added automatically, but this behavior can be
   overridden. See :ref:`automatic-primary-key-fields`.
@@ -110,7 +110,7 @@
 
 * The database column type (e.g. ``INTEGER``, ``VARCHAR``).
 
-* The default :doc:`widget </ref/forms/widgets>` to use when rendering a form
+* The default HTML :doc:`widget </ref/forms/widgets>` to use when rendering a form
   field (e.g. ``<input type="text">``, ``<select>``).
 
 * The minimal validation requirements, used in Django's admin and in
@@ -234,7 +234,7 @@
 ``id`` column.
 
 Each model requires exactly one field to have :attr:`primary_key=True
-<Field.primary_key>`.
+<Field.primary_key>` (either explicitly declared or automatically added).
 
 .. _verbose-field-names:
 
diff --git a/lib/django-1.5/docs/topics/db/multi-db.txt b/lib/django-1.5/docs/topics/db/multi-db.txt
index 8150e49..a2c4aeb 100644
--- a/lib/django-1.5/docs/topics/db/multi-db.txt
+++ b/lib/django-1.5/docs/topics/db/multi-db.txt
@@ -694,6 +694,6 @@
 
 .. warning::
 
-    If you're synchronizing content types to more that one database, be aware
+    If you're synchronizing content types to more than one database, be aware
     that their primary keys may not match across databases. This may result in
     data corruption or data loss.
diff --git a/lib/django-1.5/docs/topics/db/optimization.txt b/lib/django-1.5/docs/topics/db/optimization.txt
index c1459ae..9e3c360 100644
--- a/lib/django-1.5/docs/topics/db/optimization.txt
+++ b/lib/django-1.5/docs/topics/db/optimization.txt
@@ -38,7 +38,9 @@
 
 * Indexes. This is a number one priority, *after* you have determined from
   profiling what indexes should be added. Use
-  :attr:`django.db.models.Field.db_index` to add these from Django.
+  :attr:`django.db.models.Field.db_index` or
+  :attr:`Meta.index_together <django.db.models.Options.index_together>` to add
+  these from Django.
 
 * Appropriate use of field types.
 
diff --git a/lib/django-1.5/docs/topics/db/queries.txt b/lib/django-1.5/docs/topics/db/queries.txt
index e14849b..a9f1374 100644
--- a/lib/django-1.5/docs/topics/db/queries.txt
+++ b/lib/django-1.5/docs/topics/db/queries.txt
@@ -139,8 +139,8 @@
 :class:`~django.db.models.Manager` on your model class.
 
 A :class:`~django.db.models.query.QuerySet` represents a collection of objects
-from your database. It can have zero, one or many *filters* -- criteria that
-narrow down the collection based on given parameters. In SQL terms, a
+from your database. It can have zero, one or many *filters*. Filters narrow
+down the query results based on the given parameters. In SQL terms, a
 :class:`~django.db.models.query.QuerySet` equates to a ``SELECT`` statement,
 and a filter is a limiting clause such as ``WHERE`` or ``LIMIT``.
 
@@ -256,10 +256,10 @@
 These three ``QuerySets`` are separate. The first is a base
 :class:`~django.db.models.query.QuerySet` containing all entries that contain a
 headline starting with "What". The second is a subset of the first, with an
-additional criteria that excludes records whose ``pub_date`` is greater than
-now. The third is a subset of the first, with an additional criteria that
-selects only the records whose ``pub_date`` is greater than now. The initial
-:class:`~django.db.models.query.QuerySet` (``q1``) is unaffected by the
+additional criteria that excludes records whose ``pub_date`` is today or in the
+future. The third is a subset of the first, with an additional criteria that
+selects only the records whose ``pub_date`` is today or in the future. The
+initial :class:`~django.db.models.query.QuerySet` (``q1``) is unaffected by the
 refinement process.
 
 .. _querysets-are-lazy:
@@ -1092,8 +1092,9 @@
     >>> e.blog = some_blog
     >>> e.save()
 
-If a :class:`~django.db.models.ForeignKey` field has ``null=True`` set (i.e., it allows ``NULL``
-values), you can assign ``None`` to it. Example::
+If a :class:`~django.db.models.ForeignKey` field has ``null=True`` set (i.e.,
+it allows ``NULL`` values), you can assign ``None`` to remove the relation.
+Example::
 
     >>> e = Entry.objects.get(id=2)
     >>> e.blog = None
diff --git a/lib/django-1.5/docs/topics/db/sql.txt b/lib/django-1.5/docs/topics/db/sql.txt
index 6cc174a..5aff03c 100644
--- a/lib/django-1.5/docs/topics/db/sql.txt
+++ b/lib/django-1.5/docs/topics/db/sql.txt
@@ -23,11 +23,11 @@
 
 .. method:: Manager.raw(raw_query, params=None, translations=None)
 
-This method method takes a raw SQL query, executes it, and returns a
+This method takes a raw SQL query, executes it, and returns a
 ``django.db.models.query.RawQuerySet`` instance. This ``RawQuerySet`` instance
-can be iterated over just like an normal QuerySet to provide object instances.
+can be iterated over just like a normal QuerySet to provide object instances.
 
-This is best illustrated with an example. Suppose you've got the following model::
+This is best illustrated with an example. Suppose you have the following model::
 
     class Person(models.Model):
         first_name = models.CharField(...)
@@ -66,6 +66,16 @@
     database, but does nothing to enforce that. If the query does not
     return rows, a (possibly cryptic) error will result.
 
+.. warning::
+
+    If you are performing queries on MySQL, note that MySQL's silent type coercion
+    may cause unexpected results when mixing types. If you query on a string
+    type column, but with an integer value, MySQL will coerce the types of all values
+    in the table to an integer before performing the comparison. For example, if your
+    table contains the values ``'abc'``, ``'def'`` and you query for ``WHERE mycolumn=0``,
+    both rows will match. To prevent this, perform the correct typecasting
+    before using the value in a query.
+
 Mapping query fields to model fields
 ------------------------------------
 
@@ -108,7 +118,7 @@
     >>> first_person = Person.objects.raw('SELECT * from myapp_person')[0]
 
 However, the indexing and slicing are not performed at the database level. If
-you have a big amount of ``Person`` objects in your database, it is more
+you have a large number of ``Person`` objects in your database, it is more
 efficient to limit the query at the SQL level::
 
     >>> first_person = Person.objects.raw('SELECT * from myapp_person LIMIT 1')[0]
diff --git a/lib/django-1.5/docs/topics/email.txt b/lib/django-1.5/docs/topics/email.txt
index b3d7254..9aa9a26 100644
--- a/lib/django-1.5/docs/topics/email.txt
+++ b/lib/django-1.5/docs/topics/email.txt
@@ -422,7 +422,7 @@
 ~~~~~~~~~~~~~~~
 
 Instead of sending out real emails the console backend just writes the
-emails that would be send to the standard output. By default, the console
+emails that would be sent to the standard output. By default, the console
 backend writes to ``stdout``. You can use a different stream-like object by
 providing the ``stream`` keyword argument when constructing the connection.
 
@@ -461,7 +461,7 @@
 ``django.core.mail`` module. The ``outbox`` attribute is created when the
 first message is sent. It's a list with an
 :class:`~django.core.mail.EmailMessage` instance for each message that would
-be send.
+be sent.
 
 To specify this backend, put the following in your settings::
 
diff --git a/lib/django-1.5/docs/topics/forms/index.txt b/lib/django-1.5/docs/topics/forms/index.txt
index 4035fbd..3fb17cf 100644
--- a/lib/django-1.5/docs/topics/forms/index.txt
+++ b/lib/django-1.5/docs/topics/forms/index.txt
@@ -319,6 +319,12 @@
     The field's label wrapped in the appropriate HTML ``<label>`` tag,
     e.g. ``<label for="id_email">Email address</label>``
 
+``{{ field.id_for_label }}``
+    The ID that will be used for this field (``id_email`` in the example
+    above). You may want to use this in lieu of ``label_tag`` if you are
+    constructing the label manually. It's also useful, for example, if you have
+    some inline JavaScript and want to avoid hardcoding the field's ID.
+
 ``{{ field.value }}``
     The value of the field. e.g ``someone@example.com``
 
diff --git a/lib/django-1.5/docs/topics/forms/modelforms.txt b/lib/django-1.5/docs/topics/forms/modelforms.txt
index 34fb9b9..936a6cd 100644
--- a/lib/django-1.5/docs/topics/forms/modelforms.txt
+++ b/lib/django-1.5/docs/topics/forms/modelforms.txt
@@ -205,9 +205,8 @@
 ``full_clean()``, although you will typically not use the latter method in
 practice.
 
-``Model`` validation (:meth:`Model.full_clean()
-<django.db.models.Model.full_clean()>`) is triggered from within the form
-validation step, right after the form's ``clean()`` method is called.
+``Model`` validation is triggered from within the form validation step,
+right after the form's ``clean()`` method is called.
 
 .. warning::
 
@@ -647,6 +646,30 @@
 
    >>> AuthorFormSet(queryset=Author.objects.none())
 
+Changing the ``form``
+---------------------
+
+By default, when you use ``modelformset_factory``, a model form will
+be created using :func:`~django.forms.models.modelform_factory`.
+Often, it can be useful to specify a custom model form. For example,
+you can create a custom model form that has custom validation::
+
+    class AuthorForm(forms.ModelForm):
+        class Meta:
+            model = Author
+            fields = ('name', 'title')
+
+        def clean_name(self):
+            # custom validation for the name field
+            ...
+
+Then, pass your model form to the factory function::
+
+    AuthorFormSet = modelformset_factory(Author, form=AuthorForm)
+
+It is not always necessary to define a custom model form. The
+``modelformset_factory`` function has several arguments which are
+passed through to ``modelform_factory``, which are described below.
 
 Controlling which fields are used with ``fields`` and ``exclude``
 -----------------------------------------------------------------
@@ -712,6 +735,12 @@
 need to call ``formset.save_m2m()`` to ensure the many-to-many relationships
 are saved properly.
 
+.. note::
+
+    While calling ``formset.save(commit=False)`` does not save new or changed
+    objects to the database, it *does* delete objects that have been marked for
+    deletion. This behavior will be corrected in Django 1.7.
+
 .. _model-formsets-max-num:
 
 Limiting the number of editable objects
@@ -786,9 +815,10 @@
     class MyModelFormSet(BaseModelFormSet):
         def clean(self):
             super(MyModelFormSet, self).clean()
-            # example custom validation across forms in the formset:
+            # example custom validation across forms in the formset
             for form in self.forms:
                 # your custom formset validation
+                ...
 
 Using a custom queryset
 -----------------------
@@ -909,13 +939,35 @@
 
     :ref:`Manually rendered can_delete and can_order <manually-rendered-can-delete-and-can-order>`.
 
-Overriding ``clean()`` on an ``InlineFormSet``
-----------------------------------------------
+Overriding methods on an ``InlineFormSet``
+------------------------------------------
 
-See :ref:`model-formsets-overriding-clean`, but subclass
+When overriding methods on ``InlineFormSet``, you should subclass
 :class:`~models.BaseInlineFormSet` rather than
 :class:`~models.BaseModelFormSet`.
 
+For example, if you want to override ``clean()``::
+
+    from django.forms.models import BaseInlineFormSet
+
+    class CustomInlineFormSet(BaseInlineFormSet):
+        def clean(self):
+            super(CustomInlineFormSet, self).clean()
+            # example custom validation across forms in the formset
+            for form in self.forms:
+                # your custom formset validation
+                ...
+
+See also :ref:`model-formsets-overriding-clean`.
+
+Then when you create your inline formset, pass in the optional argument
+``formset``::
+
+    >>> from django.forms.models import inlineformset_factory
+    >>> BookFormSet = inlineformset_factory(Author, Book, formset=CustomInlineFormSet)
+    >>> author = Author.objects.get(name=u'Mike Royko')
+    >>> formset = BookFormSet(instance=author)
+
 More than one foreign key to the same model
 -------------------------------------------
 
diff --git a/lib/django-1.5/docs/topics/http/file-uploads.txt b/lib/django-1.5/docs/topics/http/file-uploads.txt
index 457c2aa..b2e8fd3 100644
--- a/lib/django-1.5/docs/topics/http/file-uploads.txt
+++ b/lib/django-1.5/docs/topics/http/file-uploads.txt
@@ -357,7 +357,7 @@
 
 Custom file upload handlers **must** define the following methods:
 
-``FileUploadHandler.receive_data_chunk(self, raw_data, start)``
+``FileUploadHandler.receive_data_chunk(raw_data, start)``
     Receives a "chunk" of data from the file upload.
 
     ``raw_data`` is a byte string containing the uploaded data.
@@ -377,7 +377,7 @@
     If you raise a ``StopUpload`` or a ``SkipFile`` exception, the upload
     will abort or the file will be completely skipped.
 
-``FileUploadHandler.file_complete(self, file_size)``
+``FileUploadHandler.file_complete(file_size)``
     Called when a file has finished uploading.
 
     The handler should return an ``UploadedFile`` object that will be stored
@@ -402,7 +402,7 @@
 
     The default is 64*2\ :sup:`10` bytes, or 64 KB.
 
-``FileUploadHandler.new_file(self, field_name, file_name, content_type, content_length, charset)``
+``FileUploadHandler.new_file(field_name, file_name, content_type, content_length, charset)``
     Callback signaling that a new file upload is starting. This is called
     before any data has been fed to any upload handlers.
 
@@ -422,10 +422,10 @@
     This method may raise a ``StopFutureHandlers`` exception to prevent
     future handlers from handling this file.
 
-``FileUploadHandler.upload_complete(self)``
+``FileUploadHandler.upload_complete()``
     Callback signaling that the entire upload (all files) has completed.
 
-``FileUploadHandler.handle_raw_input(self, input_data, META, content_length, boundary, encoding)``
+``FileUploadHandler.handle_raw_input(input_data, META, content_length, boundary, encoding)``
     Allows the handler to completely override the parsing of the raw
     HTTP input.
 
diff --git a/lib/django-1.5/docs/topics/http/middleware.txt b/lib/django-1.5/docs/topics/http/middleware.txt
index c27e7e8..8e5500a 100644
--- a/lib/django-1.5/docs/topics/http/middleware.txt
+++ b/lib/django-1.5/docs/topics/http/middleware.txt
@@ -60,7 +60,7 @@
 ``process_request``
 -------------------
 
-.. method:: process_request(self, request)
+.. method:: process_request(request)
 
 ``request`` is an :class:`~django.http.HttpRequest` object. This method is
 called on each request, before Django decides which view to execute.
@@ -78,7 +78,7 @@
 ``process_view``
 ----------------
 
-.. method:: process_view(self, request, view_func, view_args, view_kwargs)
+.. method:: process_view(request, view_func, view_args, view_kwargs)
 
 ``request`` is an :class:`~django.http.HttpRequest` object. ``view_func`` is
 the Python function that Django is about to use. (It's the actual function
@@ -117,7 +117,7 @@
 ``process_template_response``
 -----------------------------
 
-.. method:: process_template_response(self, request, response)
+.. method:: process_template_response(request, response)
 
 ``request`` is an :class:`~django.http.HttpRequest` object. ``response`` is a
 subclass of :class:`~django.template.response.SimpleTemplateResponse` (e.g.
@@ -146,7 +146,7 @@
 ``process_response``
 --------------------
 
-.. method:: process_response(self, request, response)
+.. method:: process_response(request, response)
 
 ``request`` is an :class:`~django.http.HttpRequest` object. ``response`` is the
 :class:`~django.http.HttpResponse` object returned by a Django view.
@@ -187,7 +187,7 @@
 ``process_exception``
 ---------------------
 
-.. method:: process_exception(self, request, exception)
+.. method:: process_exception(request, exception)
 
 ``request`` is an :class:`~django.http.HttpRequest` object. ``exception`` is an
 ``Exception`` object raised by the view function.
diff --git a/lib/django-1.5/docs/topics/http/sessions.txt b/lib/django-1.5/docs/topics/http/sessions.txt
index 039a775..e063020 100644
--- a/lib/django-1.5/docs/topics/http/sessions.txt
+++ b/lib/django-1.5/docs/topics/http/sessions.txt
@@ -76,7 +76,7 @@
 
 * Set :setting:`SESSION_ENGINE` to
   ``"django.contrib.sessions.backends.cache"`` for a simple caching session
-  store. Session data will be stored directly your cache. However, session
+  store. Session data will be stored directly in your cache. However, session
   data may not be persistent: cached data can be evicted if the cache fills
   up or if the cache server is restarted.
 
@@ -154,8 +154,12 @@
     integrity of the data (that it is all there and correct), it cannot
     guarantee freshness i.e. that you are being sent back the last thing you
     sent to the client. This means that for some uses of session data, the
-    cookie backend might open you up to `replay attacks`_. Cookies will only be
-    detected as 'stale' if they are older than your
+    cookie backend might open you up to `replay attacks`_. Unlike other session
+    backends which keep a server-side record of each session and invalidate it
+    when a user logs out, cookie-based sessions are not invalidated when a user
+    logs out. Thus if an attacker steals a user's cookie, he can use that
+    cookie to login as that user even if the user logs out. Cookies will only
+    be detected as 'stale' if they are older than your
     :setting:`SESSION_COOKIE_AGE`.
 
     **Performance**
@@ -206,17 +210,17 @@
 
       Example: ``fav_color = request.session.pop('fav_color')``
 
-    .. method:: keys
+    .. method:: keys()
 
-    .. method:: items
+    .. method:: items()
 
-    .. method:: setdefault
+    .. method:: setdefault()
 
-    .. method:: clear
+    .. method:: clear()
 
     It also has these methods:
 
-    .. method:: flush
+    .. method:: flush()
 
       Delete the current session data from the session and regenerate the
       session key value that is sent back to the user in the cookie. This is
@@ -224,21 +228,21 @@
       accessed again from the user's browser (for example, the
       :func:`django.contrib.auth.logout()` function calls it).
 
-    .. method:: set_test_cookie
+    .. method:: set_test_cookie()
 
       Sets a test cookie to determine whether the user's browser supports
       cookies. Due to the way cookies work, you won't be able to test this
       until the user's next page request. See `Setting test cookies`_ below for
       more information.
 
-    .. method:: test_cookie_worked
+    .. method:: test_cookie_worked()
 
       Returns either ``True`` or ``False``, depending on whether the user's
       browser accepted the test cookie. Due to the way cookies work, you'll
       have to call ``set_test_cookie()`` on a previous, separate page request.
       See `Setting test cookies`_ below for more information.
 
-    .. method:: delete_test_cookie
+    .. method:: delete_test_cookie()
 
       Deletes the test cookie. Use this to clean up after yourself.
 
@@ -267,7 +271,7 @@
       purposes. Session expiration is computed from the last time the
       session was *modified*.
 
-    .. method:: get_expiry_age
+    .. method:: get_expiry_age()
 
       Returns the number of seconds until this session expires. For sessions
       with no custom expiration (or those set to expire at browser close), this
@@ -282,7 +286,7 @@
         ``None``. Defaults to the value stored in the session by
         :meth:`set_expiry`, if there is one, or ``None``.
 
-    .. method:: get_expiry_date
+    .. method:: get_expiry_date()
 
       Returns the date this session will expire. For sessions with no custom
       expiration (or those set to expire at browser close), this will equal the
@@ -290,18 +294,24 @@
 
      This function accepts the same keyword arguments as :meth:`get_expiry_age`.
 
-    .. method:: get_expire_at_browser_close
+    .. method:: get_expire_at_browser_close()
 
       Returns either ``True`` or ``False``, depending on whether the user's
       session cookie will expire when the user's Web browser is closed.
 
-    .. method:: SessionBase.clear_expired
+    .. method:: clear_expired()
 
       .. versionadded:: 1.5
 
       Removes expired sessions from the session store. This class method is
       called by :djadmin:`clearsessions`.
 
+    .. method:: cycle_key()
+
+      Creates a new session key while retaining the current session data.
+      :func:`django.contrib.auth.login()` calls this method to mitigate against
+      session fixation.
+
 .. _session_serialization:
 
 Session serialization
@@ -482,7 +492,7 @@
     >>> s['last_login']
     1376587691
 
-In order to prevent session fixation attacks, sessions keys that don't exist
+In order to mitigate session fixation attacks, session keys that don't exist
 are regenerated::
 
     >>> from django.contrib.sessions.backends.db import SessionStore
@@ -716,6 +726,26 @@
 
 .. _Django settings: ../settings/
 
+.. _topics-session-security:
+
+Session security
+================
+
+Subdomains within a site are able to set cookies on the client for the whole
+domain. This makes session fixation possible if cookies are permitted from
+subdomains not controlled by trusted users.
+
+For example, an attacker could log into ``good.example.com`` and get a valid
+session for his account. If the attacker has control over ``bad.example.com``,
+he can use it to send his session key to you since a subdomain is permitted
+to set cookies on ``*.example.com``. When you visit ``good.example.com``,
+you'll be logged in as the attacker and might inadvertently enter your
+sensitive personal data (e.g. credit card info) into the attackers account.
+
+Another possible attack would be if ``good.example.com`` sets its
+:setting:`SESSION_COOKIE_DOMAIN` to ``".example.com"`` which would cause
+session cookies from that site to be sent to ``bad.example.com``.
+
 Technical details
 =================
 
diff --git a/lib/django-1.5/docs/topics/http/shortcuts.txt b/lib/django-1.5/docs/topics/http/shortcuts.txt
index 6f4c78f..60d456b 100644
--- a/lib/django-1.5/docs/topics/http/shortcuts.txt
+++ b/lib/django-1.5/docs/topics/http/shortcuts.txt
@@ -81,7 +81,7 @@
 
     def my_view(request):
         # View code here...
-        t = loader.get_template('myapp/template.html')
+        t = loader.get_template('myapp/index.html')
         c = RequestContext(request, {'foo': 'bar'})
         return HttpResponse(t.render(c),
             content_type="application/xhtml+xml")
@@ -152,7 +152,7 @@
 
     def my_view(request):
         # View code here...
-        t = loader.get_template('myapp/template.html')
+        t = loader.get_template('myapp/index.html')
         c = Context({'foo': 'bar'})
         return HttpResponse(t.render(c),
             content_type="application/xhtml+xml")
@@ -167,7 +167,8 @@
 
    The arguments could be:
 
-   * A model: the model's `get_absolute_url()` function will be called.
+   * A model: the model's :meth:`~django.db.models.Model.get_absolute_url()`
+     function will be called.
 
    * A view name, possibly with arguments: :func:`urlresolvers.reverse
      <django.core.urlresolvers.reverse>` will be used to reverse-resolve the
diff --git a/lib/django-1.5/docs/topics/http/urls.txt b/lib/django-1.5/docs/topics/http/urls.txt
index cfe1f46..90a3f07 100644
--- a/lib/django-1.5/docs/topics/http/urls.txt
+++ b/lib/django-1.5/docs/topics/http/urls.txt
@@ -177,8 +177,8 @@
 methods -- ``POST``, ``GET``, ``HEAD``, etc. -- will be routed to the same
 function for the same URL.
 
-Notes on capturing text in URLs
-===============================
+Captured arguments are always strings
+=====================================
 
 Each captured argument is sent to the view as a plain Python string, regardless
 of what sort of match the regular expression makes. For example, in this
@@ -189,6 +189,9 @@
 ...the ``year`` argument to ``news.views.year_archive()`` will be a string, not
 an integer, even though the ``\d{4}`` will only match integer strings.
 
+Specifying defaults for view arguments
+======================================
+
 A convenient trick is to specify default parameters for your views' arguments.
 Here's an example URLconf and view::
 
@@ -366,6 +369,32 @@
 In this example, the ``/credit/reports/`` URL will be handled by the
 ``credit.views.report()`` Django view.
 
+This can be used to remove redundancy from URLconfs where a single pattern
+prefix is used repeatedly. For example, consider this URLconf::
+
+    from django.conf.urls import patterns, url
+
+    urlpatterns = patterns('wiki.views',
+        url(r'^(?P<page_slug>\w+)-(?P<page_id>\w+)/history/$', 'history'),
+        url(r'^(?P<page_slug>\w+)-(?P<page_id>\w+)/edit/$', 'edit'),
+        url(r'^(?P<page_slug>\w+)-(?P<page_id>\w+)/discuss/$', 'discuss'),
+        url(r'^(?P<page_slug>\w+)-(?P<page_id>\w+)/permissions/$', 'permissions'),
+    )
+
+We can improve this by stating the common path prefix only once and grouping
+the suffixes that differ::
+
+    from django.conf.urls import include, patterns, url
+
+    urlpatterns = patterns('',
+        url(r'^(?P<page_slug>\w+)-(?P<page_id>\w+)/', include(patterns('wiki.views',
+            url(r'^history/$', 'history'),
+            url(r'^edit/$', 'edit'),
+            url(r'^discuss/$', 'discuss'),
+            url(r'^permissions/$', 'permissions'),
+        ))),
+    )
+
 .. _`Django Web site`: https://www.djangoproject.com/
 
 Captured parameters
@@ -813,7 +842,7 @@
         url(r'^advanced/$', 'apps.help.views.views.advanced'),
     )
 
-    url(r'^help/', include(help_patterns, 'bar', 'foo')),
+    url(r'^help/', include((help_patterns, 'bar', 'foo'))),
 
 This will include the nominated URL patterns into the given application and
 instance namespace.
@@ -824,3 +853,8 @@
 site, plus the application namespace ``'admin'``, and the name of the admin
 instance. It is this ``urls`` attribute that you ``include()`` into your
 project's ``urlpatterns`` when you deploy an Admin instance.
+
+Be sure to pass a tuple to ``include()``. If you simply pass three arguments:
+``include(help_patterns, 'bar', 'foo')``, Django won't throw an error but due
+to the signature of ``include()``, ``'bar'`` will be the instance namespace and
+``'foo'`` will be the application namespace instead of vice versa.
diff --git a/lib/django-1.5/docs/topics/i18n/translation.txt b/lib/django-1.5/docs/topics/i18n/translation.txt
index 9e6052e..82dc538 100644
--- a/lib/django-1.5/docs/topics/i18n/translation.txt
+++ b/lib/django-1.5/docs/topics/i18n/translation.txt
@@ -708,7 +708,7 @@
         msgid "Go"
         msgstr ""
 
-        #. Translators:
+        #. Translators: This is a text of the base template
         # path/to/template/file.html:103
         msgid "Ambiguous translatable block of text"
         msgstr ""
@@ -954,8 +954,8 @@
 
 Server-side caching will reduce CPU load. It's easily implemented with the
 :func:`~django.views.decorators.cache.cache_page` decorator. To trigger cache
-invalidation when your translations change, provide a version-dependant key
-prefix, as shown in the example below, or map the view at a version-dependant
+invalidation when your translations change, provide a version-dependent key
+prefix, as shown in the example below, or map the view at a version-dependent
 URL.
 
 .. code-block:: python
@@ -1525,7 +1525,7 @@
 files <message file>` and their compiled versions (``.mo``) exist.
 
 If you want to let each individual user specify which language he or she
-prefers, then you also need to use use the ``LocaleMiddleware``.
+prefers, then you also need to use the ``LocaleMiddleware``.
 ``LocaleMiddleware`` enables language selection based on data from the request.
 It customizes content for each user.
 
@@ -1604,29 +1604,19 @@
   en-us).
 
 * If you define a custom :setting:`LANGUAGES` setting, as explained in the
-  previous bullet, it's OK to mark the languages as translation strings
-  -- but use a "dummy" ``ugettext()`` function, not the one in
-  ``django.utils.translation``. You should *never* import
-  ``django.utils.translation`` from within your settings file, because that
-  module in itself depends on the settings, and that would cause a circular
-  import.
+  previous bullet, you can mark the language names as translation strings
+  -- but use :func:`~django.utils.translation.ugettext_lazy` instead of
+  :func:`~django.utils.translation.ugettext` to avoid a circular import.
 
-  The solution is to use a "dummy" ``ugettext()`` function. Here's a sample
-  settings file::
+  Here's a sample settings file::
 
-      ugettext = lambda s: s
+      from django.utils.translation import ugettext_lazy as _
 
       LANGUAGES = (
-          ('de', ugettext('German')),
-          ('en', ugettext('English')),
+          ('de', _('German')),
+          ('en', _('English')),
       )
 
-  With this arrangement, :djadmin:`django-admin.py makemessages <makemessages>`
-  will still find and mark these strings for translation, but the translation
-  won't happen at runtime -- so you'll have to remember to wrap the languages in
-  the *real* ``ugettext()`` in any code that uses :setting:`LANGUAGES` at
-  runtime.
-
 * The ``LocaleMiddleware`` can only select languages for which there is a
   Django-provided base translation. If you want to provide translations
   for your application that aren't already in the set of translations
diff --git a/lib/django-1.5/docs/topics/logging.txt b/lib/django-1.5/docs/topics/logging.txt
index 90280dd..c089e31 100644
--- a/lib/django-1.5/docs/topics/logging.txt
+++ b/lib/django-1.5/docs/topics/logging.txt
@@ -218,14 +218,16 @@
 and the log levels and other properties that you want those components
 to have.
 
-Prior to Django 1.5, the :setting:`LOGGING` setting overwrote the :ref:`default
-Django logging configuration <default-logging-configuration>`. From Django
-1.5 forward, the project's logging configuration is merged with Django's
-defaults, hence you can decide if you want to add to, or replace the existing
-configuration. To completely override the default configuration, set the
-``disable_existing_loggers`` key to ``True`` (which is the default) in the
-:setting:`LOGGING` dictConfig. Alternatively you can redefine some or all of
-the loggers by setting ``disable_existing_loggers`` to ``False``.
+Prior to Django 1.5, the :setting:`LOGGING` setting always overwrote the
+:ref:`default Django logging configuration <default-logging-configuration>`.
+From Django 1.5 forward, it is possible to get the project's logging
+configuration merged with Django's defaults, hence you can decide if you want to
+add to, or replace the existing configuration.
+
+If the ``disable_existing_loggers`` key in the :setting:`LOGGING` dictConfig is
+set to ``True`` (which is the default) the default configuration is completely
+overridden. Alternatively you can redefine some or all of the loggers by
+setting ``disable_existing_loggers`` to ``False``.
 
 Logging is configured as soon as settings have been loaded
 (either manually using :func:`~django.conf.settings.configure` or when at least
diff --git a/lib/django-1.5/docs/topics/python3.txt b/lib/django-1.5/docs/topics/python3.txt
index 633ffe0..52d9aed 100644
--- a/lib/django-1.5/docs/topics/python3.txt
+++ b/lib/django-1.5/docs/topics/python3.txt
@@ -26,10 +26,10 @@
 encouraged to use the same porting strategy as Django itself.
 
 Writing compatible code is much easier if you target Python ≥ 2.6. Django 1.5
-introduces compatibility tools such as :mod:`django.utils.six`. For
-convenience, forwards-compatible aliases were introduced in Django 1.4.2. If
-your application takes advantage of these tools, it will require Django ≥
-1.4.2.
+introduces compatibility tools such as :mod:`django.utils.six`, which is a
+customized version of the :mod:`six module <six>`. For convenience,
+forwards-compatible aliases were introduced in Django 1.4.2. If your
+application takes advantage of these tools, it will require Django ≥ 1.4.2.
 
 Obviously, writing compatible source code adds some overhead, and that can
 cause frustration. Django's developers have found that attempting to write
@@ -325,8 +325,8 @@
 six_ is the canonical compatibility library for supporting Python 2 and 3 in
 a single codebase. Read its documentation!
 
-:mod:`six` is bundled with Django as of version 1.4.2. You can import it as
-:mod:`django.utils.six`.
+A :mod:`customized version of six <django.utils.six>` is bundled with Django
+as of version 1.4.2. You can import it as ``django.utils.six``.
 
 Here are the most common changes required to write compatible code.
 
@@ -361,8 +361,9 @@
 Moved modules
 ~~~~~~~~~~~~~
 
-Some modules were renamed in Python 3. The :mod:`django.utils.six.moves
-<six.moves>` module provides a compatible location to import them.
+Some modules were renamed in Python 3. The ``django.utils.six.moves``
+module (based on the :mod:`six.moves module <six.moves>`) provides a
+compatible location to import them.
 
 The ``urllib``, ``urllib2`` and ``urlparse`` modules were reworked in depth
 and :mod:`django.utils.six.moves <six.moves>` doesn't handle them. Django
@@ -388,10 +389,11 @@
 
 .. module:: django.utils.six
 
-Customizations of six
----------------------
+Django customized version of six
+--------------------------------
 
-The version of six bundled with Django includes a few extras.
+The version of six bundled with Django (``django.utils.six``) includes a few
+extras.
 
 .. function:: assertRaisesRegex(testcase, *args, **kwargs)
 
diff --git a/lib/django-1.5/docs/topics/security.txt b/lib/django-1.5/docs/topics/security.txt
index 7d921ad..4c30c37 100644
--- a/lib/django-1.5/docs/topics/security.txt
+++ b/lib/django-1.5/docs/topics/security.txt
@@ -202,6 +202,13 @@
 the ``X-Forwarded-Host`` header (via the :setting:`USE_X_FORWARDED_HOST`
 setting) if your configuration requires it.
 
+Session security
+================
+
+Similar to the :ref:`CSRF limitations <csrf-limitations>` requiring a site to
+be deployed such that untrusted users don't have access to any subdomains,
+:mod:`django.contrib.sessions` also has limitations. See :ref:`the session
+topic guide section on security <topics-session-security>` for details.
 
 .. _additional-security-topics:
 
diff --git a/lib/django-1.5/docs/topics/signing.txt b/lib/django-1.5/docs/topics/signing.txt
index 3da70da..0d001e7 100644
--- a/lib/django-1.5/docs/topics/signing.txt
+++ b/lib/django-1.5/docs/topics/signing.txt
@@ -76,8 +76,10 @@
 
 .. class:: Signer(key=None, sep=':', salt=None)
 
-    Returns a signer which uses ``key`` to generate signatures and ``sep``
-    to separate values.
+    Returns a signer which uses ``key`` to generate signatures and ``sep`` to
+    separate values. ``sep`` cannot be in the `URL safe base64 alphabet
+    <http://tools.ietf.org/html/rfc4648#section-5>`_.  This alphabet contains
+    alphanumeric characters, hyphens, and underscores.
 
 Using the salt argument
 -----------------------
diff --git a/lib/django-1.5/docs/topics/testing/advanced.txt b/lib/django-1.5/docs/topics/testing/advanced.txt
index c07b718..a43cfbb 100644
--- a/lib/django-1.5/docs/topics/testing/advanced.txt
+++ b/lib/django-1.5/docs/topics/testing/advanced.txt
@@ -46,7 +46,7 @@
             # Every test needs access to the request factory.
             self.factory = RequestFactory()
             self.user = User.objects.create_user(
-                first_name='jacob', email='jacob@…', password='top_secret')
+                username='jacob', email='jacob@…', password='top_secret')
 
         def test_details(self):
             # Create an instance of a GET request.
diff --git a/lib/django-1.5/docs/topics/testing/overview.txt b/lib/django-1.5/docs/topics/testing/overview.txt
index ea475e0..e71771a 100644
--- a/lib/django-1.5/docs/topics/testing/overview.txt
+++ b/lib/django-1.5/docs/topics/testing/overview.txt
@@ -1286,6 +1286,10 @@
 can be certain that the outcome of a test will not be affected by another test,
 or by the order of test execution.
 
+By default, fixtures are only loaded into the ``default`` database. If you are
+using multiple databases and set :attr:`multi_db=True
+<TestCase.multi_db>`, fixtures will be loaded into all databases.
+
 URLconf configuration
 ~~~~~~~~~~~~~~~~~~~~~
 
@@ -1351,12 +1355,17 @@
 This test case will flush *all* the test databases before running
 ``testIndexPageView``.
 
+The ``multi_db`` flag also affects into which databases the
+:attr:`TransactionTestCase.fixtures` are loaded. By default (when
+``multi_db=False``), fixtures are only loaded into the ``default`` database.
+If ``multi_db=True``, fixtures are loaded into all databases.
+
 .. _overriding-settings:
 
 Overriding settings
 ~~~~~~~~~~~~~~~~~~~
 
-.. method:: TestCase.settings
+.. method:: TestCase.settings()
 
 .. versionadded:: 1.4
 
@@ -1479,7 +1488,7 @@
     failure. Similar to unittest's :meth:`~unittest.TestCase.assertRaisesRegexp`
     with the difference that ``expected_message`` isn't a regular expression.
 
-.. method:: SimpleTestCase.assertFieldOutput(self, fieldclass, valid, invalid, field_args=None, field_kwargs=None, empty_value=u'')
+.. method:: SimpleTestCase.assertFieldOutput(fieldclass, valid, invalid, field_args=None, field_kwargs=None, empty_value=u'')
 
     .. versionadded:: 1.4
 
diff --git a/lib/django-1.5/setup.py b/lib/django-1.5/setup.py
index 38db93d..8e44f31 100644
--- a/lib/django-1.5/setup.py
+++ b/lib/django-1.5/setup.py
@@ -85,7 +85,7 @@
     author_email='foundation@djangoproject.com',
     description=('A high-level Python Web framework that encourages '
                  'rapid development and clean, pragmatic design.'),
-    download_url='https://www.djangoproject.com/m/releases/1.5/Django-1.5.4.tar.gz',
+    download_url='https://www.djangoproject.com/m/releases/1.5/Django-1.5.8.tar.gz',
     license='BSD',
     packages=packages,
     package_data=package_data,
diff --git a/lib/django-1.5/tests/modeltests/defer/models.py b/lib/django-1.5/tests/modeltests/defer/models.py
index 0688cbc..93e3b95 100644
--- a/lib/django-1.5/tests/modeltests/defer/models.py
+++ b/lib/django-1.5/tests/modeltests/defer/models.py
@@ -28,3 +28,22 @@
 class ChildProxy(Child):
     class Meta:
         proxy=True
+
+class Profile(models.Model):
+    profile1 = models.CharField(max_length=1000, default='profile1')
+
+class Location(models.Model):
+    location1 = models.CharField(max_length=1000, default='location1')
+
+class Item(models.Model):
+    pass
+
+class Request(models.Model):
+    profile = models.ForeignKey(Profile, null=True, blank=True)
+    location = models.ForeignKey(Location)
+    items = models.ManyToManyField(Item)
+
+    request1 = models.CharField(default='request1', max_length=1000)
+    request2 = models.CharField(default='request2', max_length=1000)
+    request3 = models.CharField(default='request3', max_length=1000)
+    request4 = models.CharField(default='request4', max_length=1000)
diff --git a/lib/django-1.5/tests/modeltests/defer/tests.py b/lib/django-1.5/tests/modeltests/defer/tests.py
index 50db5a7..f027047 100644
--- a/lib/django-1.5/tests/modeltests/defer/tests.py
+++ b/lib/django-1.5/tests/modeltests/defer/tests.py
@@ -1,9 +1,10 @@
 from __future__ import absolute_import
 
+from django.db.models import Count
 from django.db.models.query_utils import DeferredAttribute, InvalidQuery
 from django.test import TestCase
 
-from .models import Secondary, Primary, Child, BigChild, ChildProxy
+from .models import Secondary, Primary, Child, BigChild, ChildProxy, Location, Request
 
 
 class DeferTests(TestCase):
@@ -183,3 +184,17 @@
         with self.assertNumQueries(0):
             bc_deferred.id
         self.assertEqual(bc_deferred.pk, bc_deferred.id)
+
+class DeferAnnotateSelectRelatedTest(TestCase):
+    def test_defer_annotate_select_related(self):
+        location = Location.objects.create()
+        Request.objects.create(location=location)
+        self.assertIsInstance(list(Request.objects
+            .annotate(Count('items')).select_related('profile', 'location')
+            .only('profile', 'location')), list)
+        self.assertIsInstance(list(Request.objects
+            .annotate(Count('items')).select_related('profile', 'location')
+            .only('profile__profile1', 'location__location1')), list)
+        self.assertIsInstance(list(Request.objects
+            .annotate(Count('items')).select_related('profile', 'location')
+            .defer('request1', 'request2', 'request3', 'request4')), list)
diff --git a/lib/django-1.5/tests/regressiontests/aggregation_regress/tests.py b/lib/django-1.5/tests/regressiontests/aggregation_regress/tests.py
index 71c90ec..65d345a 100644
--- a/lib/django-1.5/tests/regressiontests/aggregation_regress/tests.py
+++ b/lib/django-1.5/tests/regressiontests/aggregation_regress/tests.py
@@ -383,6 +383,17 @@
         qs = Entries.objects.annotate(clue_count=Count('clues__ID'))
         self.assertQuerysetEqual(qs, [])
 
+    def test_boolean_conversion(self):
+        # Aggregates mixed up ordering of columns for backend's convert_values
+        # method. Refs #21126.
+        e = Entries.objects.create(Entry='foo')
+        c = Clues.objects.create(EntryID=e, Clue='bar')
+        qs = Clues.objects.select_related('EntryID').annotate(Count('ID'))
+        self.assertQuerysetEqual(
+            qs, [c], lambda x: x)
+        self.assertEqual(qs[0].EntryID, e)
+        self.assertIs(qs[0].EntryID.Exclude, False)
+
     def test_empty(self):
         # Regression for #10089: Check handling of empty result sets with
         # aggregates
diff --git a/lib/django-1.5/tests/regressiontests/cache/tests.py b/lib/django-1.5/tests/regressiontests/cache/tests.py
index cd7da4c..6d38116 100644
--- a/lib/django-1.5/tests/regressiontests/cache/tests.py
+++ b/lib/django-1.5/tests/regressiontests/cache/tests.py
@@ -18,11 +18,13 @@
 from django.core.cache import get_cache
 from django.core.cache.backends.base import (CacheKeyWarning,
     InvalidCacheBackendError)
+from django.core.context_processors import csrf
 from django.db import router
 from django.http import (HttpResponse, HttpRequest, StreamingHttpResponse,
     QueryDict)
 from django.middleware.cache import (FetchFromCacheMiddleware,
     UpdateCacheMiddleware, CacheMiddleware)
+from django.middleware.csrf import CsrfViewMiddleware
 from django.template import Template
 from django.template.response import TemplateResponse
 from django.test import TestCase, TransactionTestCase, RequestFactory
@@ -1456,6 +1458,10 @@
     return HttpResponse('Hello World %s' % value)
 
 
+def csrf_view(request):
+    return HttpResponse(csrf(request)['csrf_token'])
+
+
 @override_settings(
         CACHE_MIDDLEWARE_ALIAS='other',
         CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
@@ -1696,6 +1702,27 @@
         response = other_with_timeout_view(request, '18')
         self.assertEqual(response.content, b'Hello World 18')
 
+    def test_sensitive_cookie_not_cached(self):
+        """
+        Django must prevent caching of responses that set a user-specific (and
+        maybe security sensitive) cookie in response to a cookie-less request.
+        """
+        csrf_middleware = CsrfViewMiddleware()
+        cache_middleware = CacheMiddleware()
+
+        request = self.factory.get('/view/')
+        self.assertIsNone(cache_middleware.process_request(request))
+
+        csrf_middleware.process_view(request, csrf_view, (), {})
+
+        response = csrf_view(request)
+
+        response = csrf_middleware.process_response(request, response)
+        response = cache_middleware.process_response(request, response)
+
+        # Inserting a CSRF cookie in a cookie-less request prevented caching.
+        self.assertIsNone(cache_middleware.process_request(request))
+
 
 @override_settings(
         CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
diff --git a/lib/django-1.5/tests/regressiontests/forms/tests/widgets.py b/lib/django-1.5/tests/regressiontests/forms/tests/widgets.py
index f9dc4a7..599ba53 100644
--- a/lib/django-1.5/tests/regressiontests/forms/tests/widgets.py
+++ b/lib/django-1.5/tests/regressiontests/forms/tests/widgets.py
@@ -1112,6 +1112,7 @@
         self.selenium.get('%s%s' % (self.live_server_url,
             reverse('article_form', args=[article.pk])))
         self.selenium.find_element_by_id('submit').submit()
+        self.wait_page_loaded()
         article = Article.objects.get(pk=article.pk)
         # Should be "\nTst\n" after #19251 is fixed
         self.assertEqual(article.content, "\r\nTst\r\n")
diff --git a/lib/django-1.5/tests/regressiontests/initial_sql_regress/sql/simple.sql b/lib/django-1.5/tests/regressiontests/initial_sql_regress/sql/simple.sql
index d82f838..3811be4 100644
--- a/lib/django-1.5/tests/regressiontests/initial_sql_regress/sql/simple.sql
+++ b/lib/django-1.5/tests/regressiontests/initial_sql_regress/sql/simple.sql
@@ -7,5 +7,5 @@
 INSERT INTO initial_sql_regress_simple (name) VALUES ('George');
 INSERT INTO initial_sql_regress_simple (name) VALUES ('Miles O''Brien');
 INSERT INTO initial_sql_regress_simple (name) VALUES ('Semicolon;Man');
-INSERT INTO initial_sql_regress_simple (name) VALUES ('This line has a Windows line ending');

+INSERT INTO initial_sql_regress_simple (name) VALUES ('This line has a Windows line ending');
 
diff --git a/lib/django-1.5/tests/regressiontests/model_fields/tests.py b/lib/django-1.5/tests/regressiontests/model_fields/tests.py
index a49894e..5fd4db1 100644
--- a/lib/django-1.5/tests/regressiontests/model_fields/tests.py
+++ b/lib/django-1.5/tests/regressiontests/model_fields/tests.py
@@ -6,8 +6,15 @@
 from django import test
 from django import forms
 from django.core.exceptions import ValidationError
+from django.db.models.fields import (
+    AutoField, BigIntegerField, BooleanField, CharField,
+    CommaSeparatedIntegerField, DateField, DateTimeField, DecimalField,
+    EmailField, FilePathField, FloatField, IntegerField, IPAddressField,
+    GenericIPAddressField, NullBooleanField, PositiveIntegerField,
+    PositiveSmallIntegerField, SlugField, SmallIntegerField, TextField,
+    TimeField, URLField)
 from django.db import models
-from django.db.models.fields.files import FieldFile
+from django.db.models.fields.files import FileField, ImageField, FieldFile
 from django.utils import six
 from django.utils import unittest
 
@@ -414,3 +421,89 @@
         field = d._meta.get_field('myfile')
         field.save_form_data(d, 'else.txt')
         self.assertEqual(d.myfile, 'else.txt')
+
+
+class PrepValueTest(test.TestCase):
+    def test_AutoField(self):
+        self.assertIsInstance(AutoField(primary_key=True).get_prep_value(1), int)
+
+    @unittest.skipIf(six.PY3, "Python 3 has no `long` type.")
+    def test_BigIntegerField(self):
+        self.assertIsInstance(BigIntegerField().get_prep_value(long(9999999999999999999)), long)
+
+    def test_BooleanField(self):
+        self.assertIsInstance(BooleanField().get_prep_value(True), bool)
+
+    def test_CharField(self):
+        self.assertIsInstance(CharField().get_prep_value(''), six.text_type)
+        self.assertIsInstance(CharField().get_prep_value(0), six.text_type)
+
+    def test_CommaSeparatedIntegerField(self):
+        self.assertIsInstance(CommaSeparatedIntegerField().get_prep_value('1,2'), six.text_type)
+        self.assertIsInstance(CommaSeparatedIntegerField().get_prep_value(0), six.text_type)
+
+    def test_DateField(self):
+        self.assertIsInstance(DateField().get_prep_value(datetime.date.today()), datetime.date)
+
+    def test_DateTimeField(self):
+        self.assertIsInstance(DateTimeField().get_prep_value(datetime.datetime.now()), datetime.datetime)
+
+    def test_DecimalField(self):
+        self.assertIsInstance(DecimalField().get_prep_value(Decimal('1.2')), Decimal)
+
+    def test_EmailField(self):
+        self.assertIsInstance(EmailField().get_prep_value('mailbox@domain.com'), six.text_type)
+
+    def test_FileField(self):
+        self.assertIsInstance(FileField().get_prep_value('filename.ext'), six.text_type)
+        self.assertIsInstance(FileField().get_prep_value(0), six.text_type)
+
+    def test_FilePathField(self):
+        self.assertIsInstance(FilePathField().get_prep_value('tests.py'), six.text_type)
+        self.assertIsInstance(FilePathField().get_prep_value(0), six.text_type)
+
+    def test_FloatField(self):
+        self.assertIsInstance(FloatField().get_prep_value(1.2), float)
+
+    def test_ImageField(self):
+        self.assertIsInstance(ImageField().get_prep_value('filename.ext'), six.text_type)
+
+    def test_IntegerField(self):
+        self.assertIsInstance(IntegerField().get_prep_value(1), int)
+
+    def test_IPAddressField(self):
+        self.assertIsInstance(IPAddressField().get_prep_value('127.0.0.1'), six.text_type)
+        self.assertIsInstance(IPAddressField().get_prep_value(0), six.text_type)
+
+    def test_GenericIPAddressField(self):
+        self.assertIsInstance(GenericIPAddressField().get_prep_value('127.0.0.1'), six.text_type)
+        self.assertIsInstance(GenericIPAddressField().get_prep_value(0), six.text_type)
+
+    def test_NullBooleanField(self):
+        self.assertIsInstance(NullBooleanField().get_prep_value(True), bool)
+
+    def test_PositiveIntegerField(self):
+        self.assertIsInstance(PositiveIntegerField().get_prep_value(1), int)
+
+    def test_PositiveSmallIntegerField(self):
+        self.assertIsInstance(PositiveSmallIntegerField().get_prep_value(1), int)
+
+    def test_SlugField(self):
+        self.assertIsInstance(SlugField().get_prep_value('slug'), six.text_type)
+        self.assertIsInstance(SlugField().get_prep_value(0), six.text_type)
+
+    def test_SmallIntegerField(self):
+        self.assertIsInstance(SmallIntegerField().get_prep_value(1), int)
+
+    def test_TextField(self):
+        self.assertIsInstance(TextField().get_prep_value('Abc'), six.text_type)
+        self.assertIsInstance(TextField().get_prep_value(0), six.text_type)
+
+    def test_TimeField(self):
+        self.assertIsInstance(
+            TimeField().get_prep_value(datetime.datetime.now().time()),
+            datetime.time)
+
+    def test_URLField(self):
+        self.assertIsInstance(URLField().get_prep_value('http://domain.com'), six.text_type)
+
diff --git a/lib/django-1.5/tests/regressiontests/queries/models.py b/lib/django-1.5/tests/regressiontests/queries/models.py
index 4ce696c..143e620 100644
--- a/lib/django-1.5/tests/regressiontests/queries/models.py
+++ b/lib/django-1.5/tests/regressiontests/queries/models.py
@@ -401,3 +401,12 @@
     name = models.TextField()
     b = models.ForeignKey(ModelB, null=True)
     d = models.ForeignKey(ModelD)
+
+class Ticket21203Parent(models.Model):
+    parentid = models.AutoField(primary_key=True)
+    parent_bool = models.BooleanField(default=True)
+    created = models.DateTimeField(auto_now=True)
+
+class Ticket21203Child(models.Model):
+    childid = models.AutoField(primary_key=True)
+    parent = models.ForeignKey(Ticket21203Parent)
diff --git a/lib/django-1.5/tests/regressiontests/queries/tests.py b/lib/django-1.5/tests/regressiontests/queries/tests.py
index 33e5337..c7e746e 100644
--- a/lib/django-1.5/tests/regressiontests/queries/tests.py
+++ b/lib/django-1.5/tests/regressiontests/queries/tests.py
@@ -23,7 +23,8 @@
     Ranking, Related, Report, ReservedName, Tag, TvChef, Valid, X, Food, Eaten,
     Node, ObjectA, ObjectB, ObjectC, CategoryItem, SimpleCategory,
     SpecialCategory, OneToOneCategory, NullableName, ProxyCategory,
-    SingleObject, RelatedObject, ModelA, ModelD)
+    SingleObject, RelatedObject, ModelA, ModelD,
+    Ticket21203Parent, Ticket21203Child)
 
 
 class BaseQuerysetTest(TestCase):
@@ -2149,3 +2150,11 @@
             self.assertIn('LEFT OUTER JOIN', str(qs.query))
         else:
             self.assertNotIn('LEFT OUTER JOIN', str(qs.query))
+
+class Ticket21203Tests(TestCase):
+    def test_ticket_21203(self):
+        p = Ticket21203Parent.objects.create(parent_bool=True)
+        c = Ticket21203Child.objects.create(parent=p)
+        qs = Ticket21203Child.objects.select_related('parent').defer('parent__created')
+        self.assertQuerysetEqual(qs, [c], lambda x: x)
+        self.assertIs(qs[0].parent.parent_bool, True)
diff --git a/lib/django-1.5/tests/regressiontests/servers/tests.py b/lib/django-1.5/tests/regressiontests/servers/tests.py
index 4495596..8bf1312 100644
--- a/lib/django-1.5/tests/regressiontests/servers/tests.py
+++ b/lib/django-1.5/tests/regressiontests/servers/tests.py
@@ -5,6 +5,7 @@
 from __future__ import unicode_literals
 
 import os
+import socket
 try:
     from urllib.request import urlopen, HTTPError
 except ImportError:     # Python 2
@@ -12,7 +13,6 @@
 
 from django.core.exceptions import ImproperlyConfigured
 from django.test import LiveServerTestCase
-from django.core.servers.basehttp import WSGIServerException
 from django.test.utils import override_settings
 from django.utils.http import urlencode
 from django.utils._os import upath
@@ -66,7 +66,7 @@
         cls.raises_exception('localhost', ImproperlyConfigured)
 
         # The host must be valid
-        cls.raises_exception('blahblahblah:8081', WSGIServerException)
+        cls.raises_exception('blahblahblah:8081', socket.error)
 
         # The list of ports must be in a valid format
         cls.raises_exception('localhost:8081,', ImproperlyConfigured)
diff --git a/lib/django-1.5/tests/regressiontests/transactions_regress/tests.py b/lib/django-1.5/tests/regressiontests/transactions_regress/tests.py
index 5d1ab2c..01f4a90 100644
--- a/lib/django-1.5/tests/regressiontests/transactions_regress/tests.py
+++ b/lib/django-1.5/tests/regressiontests/transactions_regress/tests.py
@@ -4,7 +4,7 @@
 from django.db.transaction import commit_on_success, commit_manually, TransactionManagementError
 from django.test import TransactionTestCase, skipUnlessDBFeature
 from django.test.utils import override_settings
-from django.utils.unittest import skipIf, skipUnless
+from django.utils.unittest import skipIf, skipUnless, SkipTest
 
 from .models import Mod, M2mA, M2mB
 
@@ -247,11 +247,13 @@
 
         work()
 
-    @skipIf(connection.vendor == 'mysql' and \
-            connection.features._mysql_storage_engine == 'MyISAM',
-            "MyISAM MySQL storage engine doesn't support savepoints")
     @skipUnlessDBFeature('uses_savepoints')
     def test_savepoint_rollback(self):
+        # _mysql_storage_engine issues a query and as such can't be applied in
+        # a skipIf decorator since that would execute the query on module load.
+        if (connection.vendor == 'mysql' and
+            connection.features._mysql_storage_engine == 'MyISAM'):
+            raise SkipTest("MyISAM MySQL storage engine doesn't support savepoints")
         @commit_manually
         def work():
             mod = Mod.objects.create(fld=1)
diff --git a/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/nonimported_module.py b/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/nonimported_module.py
new file mode 100644
index 0000000..df04633
--- /dev/null
+++ b/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/nonimported_module.py
@@ -0,0 +1,3 @@
+def view(request):
+    """Stub view"""
+    pass
diff --git a/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/tests.py b/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/tests.py
index eb3afe8..e3e14b3 100644
--- a/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/tests.py
+++ b/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/tests.py
@@ -1,8 +1,11 @@
+# -*- coding: utf-8 -*-
 """
 Unit tests for reverse URL lookups.
 """
 from __future__ import absolute_import, unicode_literals
 
+import sys
+
 from django.conf import settings
 from django.contrib.auth.models import User
 from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
@@ -290,6 +293,25 @@
         self.assertEqual(res['Location'], '/foo/')
         res = redirect('http://example.com/')
         self.assertEqual(res['Location'], 'http://example.com/')
+        # Assert that we can redirect using UTF-8 strings
+        res = redirect('/æøå/abc/')
+        self.assertEqual(res['Location'], '/%C3%A6%C3%B8%C3%A5/abc/')
+        # Assert that no imports are attempted when dealing with a relative path
+        # (previously, the below would resolve in a UnicodeEncodeError from __import__ )
+        res = redirect('/æøå.abc/')
+        self.assertEqual(res['Location'], '/%C3%A6%C3%B8%C3%A5.abc/')
+        res = redirect('os.path')
+        self.assertEqual(res['Location'], 'os.path')
+
+    def test_no_illegal_imports(self):
+        # modules that are not listed in urlpatterns should not be importable
+        redirect("urlpatterns_reverse.nonimported_module.view")
+        self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
+
+    def test_reverse_by_path_nested(self):
+        # Views that are added to urlpatterns using include() should be
+        # reversible by dotted path.
+        self.assertEqual(reverse('regressiontests.urlpatterns_reverse.views.nested_view'), '/includes/nested_path/')
 
     def test_redirect_view_object(self):
         from .views import absolute_kwargs_view
@@ -559,4 +581,3 @@
         # swallow it.
         self.assertRaises(AttributeError, get_callable,
             'regressiontests.urlpatterns_reverse.views_broken.i_am_broken')
-
diff --git a/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/urls.py b/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/urls.py
index 1d4ae73..7aae7c4 100644
--- a/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/urls.py
+++ b/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/urls.py
@@ -2,11 +2,12 @@
 
 from django.conf.urls import patterns, url, include
 
-from .views import empty_view, absolute_kwargs_view
+from .views import empty_view, empty_view_partial, empty_view_wrapped, absolute_kwargs_view
 
 
 other_patterns = patterns('',
     url(r'non_path_include/$', empty_view, name='non_path_include'),
+    url(r'nested_path/$', 'regressiontests.urlpatterns_reverse.views.nested_view'),
 )
 
 urlpatterns = patterns('',
@@ -55,6 +56,10 @@
     # This is non-reversible, but we shouldn't blow up when parsing it.
     url(r'^(?:foo|bar)(\w+)/$', empty_view, name="disjunction"),
 
+    # Partials should be fine.
+    url(r'^partial/', empty_view_partial, name="partial"),
+    url(r'^partial_wrapped/', empty_view_wrapped, name="partial_wrapped"),
+
     # Regression views for #9038. See tests for more details
     url(r'arg_view/$', 'kwargs_view'),
     url(r'arg_view/(?P<arg1>\d+)/$', 'kwargs_view'),
diff --git a/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/views.py b/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/views.py
index 88d169a..dd1d97c 100644
--- a/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/views.py
+++ b/lib/django-1.5/tests/regressiontests/urlpatterns_reverse/views.py
@@ -1,3 +1,5 @@
+from functools import partial, update_wrapper
+
 from django.http import HttpResponse
 from django.views.generic import RedirectView
 from django.core.urlresolvers import reverse_lazy
@@ -16,6 +18,10 @@
 def defaults_view(request, arg1, arg2):
     pass
 
+def nested_view(request):
+    pass
+
+
 def erroneous_view(request):
     import non_existent
 
@@ -41,3 +47,11 @@
 
 def bad_view(request, *args, **kwargs):
     raise ValueError("I don't think I'm getting good value for this view")
+
+
+empty_view_partial = partial(empty_view, template_name="template.html")
+
+
+empty_view_wrapped = update_wrapper(
+    partial(empty_view, template_name="template.html"), empty_view,
+)
diff --git a/lib/django-1.5/tests/regressiontests/utils/http.py b/lib/django-1.5/tests/regressiontests/utils/http.py
index 6d3bc02..87a6ba4 100644
--- a/lib/django-1.5/tests/regressiontests/utils/http.py
+++ b/lib/django-1.5/tests/regressiontests/utils/http.py
@@ -67,50 +67,6 @@
         ]
         self.assertTrue(result in acceptable_results)
 
-    def test_fix_IE_for_vary(self):
-        """
-        Regression for #16632.
-
-        `fix_IE_for_vary` shouldn't crash when there's no Content-Type header.
-        """
-
-        # functions to generate responses
-        def response_with_unsafe_content_type():
-            r = HttpResponse(content_type="text/unsafe")
-            r['Vary'] = 'Cookie'
-            return r
-
-        def no_content_response_with_unsafe_content_type():
-            # 'Content-Type' always defaulted, so delete it
-            r = response_with_unsafe_content_type()
-            del r['Content-Type']
-            return r
-
-        # request with & without IE user agent
-        rf = RequestFactory()
-        request = rf.get('/')
-        ie_request = rf.get('/', HTTP_USER_AGENT='MSIE')
-
-        # not IE, unsafe_content_type
-        response = response_with_unsafe_content_type()
-        utils.fix_IE_for_vary(request, response)
-        self.assertTrue('Vary' in response)
-
-        # IE, unsafe_content_type
-        response = response_with_unsafe_content_type()
-        utils.fix_IE_for_vary(ie_request, response)
-        self.assertFalse('Vary' in response)
-
-        # not IE, no_content
-        response = no_content_response_with_unsafe_content_type()
-        utils.fix_IE_for_vary(request, response)
-        self.assertTrue('Vary' in response)
-
-        # IE, no_content
-        response = no_content_response_with_unsafe_content_type()
-        utils.fix_IE_for_vary(ie_request, response)
-        self.assertFalse('Vary' in response)
-
     def test_base36(self):
         # reciprocity works
         for n in [0, 1, 1000, 1000000]:
@@ -135,6 +91,35 @@
             self.assertEqual(http.int_to_base36(n), b36)
             self.assertEqual(http.base36_to_int(b36), n)
 
+    def test_is_safe_url(self):
+        for bad_url in ('http://example.com',
+                        'http:///example.com',
+                        'https://example.com',
+                        'ftp://exampel.com',
+                        r'\\example.com',
+                        r'\\\example.com',
+                        r'/\\/example.com',
+                        r'\\\example.com',
+                        r'\\example.com',
+                        r'\\//example.com',
+                        r'/\/example.com',
+                        r'\/example.com',
+                        r'/\example.com',
+                        'http:///example.com',
+                        'http:/\//example.com',
+                        'http:\/example.com',
+                        'http:/\example.com',
+                        'javascript:alert("XSS")'):
+            self.assertFalse(http.is_safe_url(bad_url, host='testserver'), "%s should be blocked" % bad_url)
+        for good_url in ('/view/?param=http://example.com',
+                     '/view/?param=https://example.com',
+                     '/view?param=ftp://exampel.com',
+                     'view/?param=//example.com',
+                     'https://testserver/',
+                     'HTTPS://testserver/',
+                     '//testserver/',
+                     '/url%20with%20spaces/'):
+            self.assertTrue(http.is_safe_url(good_url, host='testserver'), "%s should be allowed" % good_url)
 
 class ETagProcessingTests(unittest.TestCase):
     def testParsing(self):
diff --git a/lib/django-1.5/tests/regressiontests/utils/simplelazyobject.py b/lib/django-1.5/tests/regressiontests/utils/simplelazyobject.py
index 2dd382c..5ef80a8 100644
--- a/lib/django-1.5/tests/regressiontests/utils/simplelazyobject.py
+++ b/lib/django-1.5/tests/regressiontests/utils/simplelazyobject.py
@@ -3,6 +3,8 @@
 import copy
 import pickle
 
+from django.contrib.auth.models import User
+from django.test import TestCase as DjangoTestCase
 from django.test.utils import str_prefix
 from django.utils import six
 from django.utils.unittest import TestCase
@@ -122,9 +124,10 @@
         self.assertEqual(six.text_type(unpickled), six.text_type(x))
         self.assertEqual(unpickled.name, x.name)
 
-    def test_pickle_py2_regression(self):
-        from django.contrib.auth.models import User
 
+class TestUtilsSimpleLazyObjectDjangoTestCase(DjangoTestCase):
+
+    def test_pickle_py2_regression(self):
         # See ticket #20212
         user = User.objects.create_user('johndoe', 'john@example.com', 'pass')
         x = SimpleLazyObject(lambda: user)
diff --git a/lib/django-1.5/tests/regressiontests/utils/text.py b/lib/django-1.5/tests/regressiontests/utils/text.py
index ebf6795..3709dcd 100644
--- a/lib/django-1.5/tests/regressiontests/utils/text.py
+++ b/lib/django-1.5/tests/regressiontests/utils/text.py
@@ -121,3 +121,16 @@
         )
         for value, output in items:
             self.assertEqual(text.slugify(value), output)
+
+    def test_unescape_entities(self):
+        items = [
+            ('', ''),
+            ('foo', 'foo'),
+            ('&amp;', '&'),
+            ('&#x26;', '&'),
+            ('&#38;', '&'),
+            ('foo &amp; bar', 'foo & bar'),
+            ('foo & bar', 'foo & bar'),
+        ]
+        for value, output in items:
+            self.assertEqual(text.unescape_entities(value), output)
diff --git a/lib/django-1.5/tests/requirements/base.txt b/lib/django-1.5/tests/requirements/base.txt
index 99e2b7f..67d1e79 100644
--- a/lib/django-1.5/tests/requirements/base.txt
+++ b/lib/django-1.5/tests/requirements/base.txt
@@ -1,5 +1,6 @@
 docutils
 Markdown
+numpy
 PyYAML
 pytz
 selenium
diff --git a/lib/docker/__init__.py b/lib/docker/__init__.py
index 3099b58..b740be3 100644
--- a/lib/docker/__init__.py
+++ b/lib/docker/__init__.py
@@ -1 +1,2 @@
-from .docker import Client, APIError
+from .docker import Client
+from .docker import errors
diff --git a/lib/docker/docker/__init__.py b/lib/docker/docker/__init__.py
index 5f642a8..e10a576 100644
--- a/lib/docker/docker/__init__.py
+++ b/lib/docker/docker/__init__.py
@@ -12,4 +12,7 @@
 #    See the License for the specific language governing permissions and
 #    limitations under the License.
 
-from .client import Client, APIError  # flake8: noqa
+__title__ = 'docker-py'
+__version__ = '0.3.0'
+
+from .client import Client # flake8: noqa
diff --git a/lib/docker/docker/auth/auth.py b/lib/docker/docker/auth/auth.py
index bef010f..0bd386d 100644
--- a/lib/docker/docker/auth/auth.py
+++ b/lib/docker/docker/auth/auth.py
@@ -20,6 +20,7 @@
 import six
 
 from ..utils import utils
+from .. import errors
 
 INDEX_URL = 'https://index.docker.io/v1/'
 DOCKER_CONFIG_FILENAME = '.dockercfg'
@@ -45,18 +46,19 @@
 
 def resolve_repository_name(repo_name):
     if '://' in repo_name:
-        raise ValueError('Repository name cannot contain a '
-                         'scheme ({0})'.format(repo_name))
+        raise errors.InvalidRepository(
+            'Repository name cannot contain a scheme ({0})'.format(repo_name))
     parts = repo_name.split('/', 1)
-    if not '.' in parts[0] and not ':' in parts[0] and parts[0] != 'localhost':
+    if '.' not in parts[0] and ':' not in parts[0] and parts[0] != 'localhost':
         # This is a docker index repo (ex: foo/bar or ubuntu)
         return INDEX_URL, repo_name
     if len(parts) < 2:
-        raise ValueError('Invalid repository name ({0})'.format(repo_name))
+        raise errors.InvalidRepository(
+            'Invalid repository name ({0})'.format(repo_name))
 
     if 'index.docker.io' in parts[0]:
-        raise ValueError('Invalid repository name,'
-                         'try "{0}" instead'.format(parts[1]))
+        raise errors.InvalidRepository(
+            'Invalid repository name, try "{0}" instead'.format(parts[1]))
 
     return expand_registry_url(parts[0]), parts[1]
 
@@ -87,6 +89,11 @@
     return authconfig.get(swap_protocol(registry), None)
 
 
+def encode_auth(auth_info):
+    return base64.b64encode(auth_info.get('username', '') + b':' +
+                            auth_info.get('password', ''))
+
+
 def decode_auth(auth):
     if isinstance(auth, six.string_types):
         auth = auth.encode('ascii')
@@ -100,6 +107,12 @@
     return base64.b64encode(auth_json)
 
 
+def encode_full_header(auth):
+    """ Returns the given auth block encoded for the X-Registry-Config header.
+    """
+    return encode_header({'configs': auth})
+
+
 def load_config(root=None):
     """Loads authentication data from a Docker configuration file in the given
     root directory."""
@@ -136,7 +149,8 @@
             data.append(line.strip().split(' = ')[1])
         if len(data) < 2:
             # Not enough data
-            raise Exception('Invalid or empty configuration file!')
+            raise errors.InvalidConfigFile(
+                'Invalid or empty configuration file!')
 
         username, password = decode_auth(data[0])
         conf[INDEX_URL] = {
diff --git a/lib/docker/docker/client.py b/lib/docker/docker/client.py
index 531a926..4bb8141 100644
--- a/lib/docker/docker/client.py
+++ b/lib/docker/docker/client.py
@@ -16,6 +16,7 @@
 import re
 import shlex
 import struct
+import warnings
 
 import requests
 import requests.exceptions
@@ -24,50 +25,16 @@
 from .auth import auth
 from .unixconn import unixconn
 from .utils import utils
+from . import errors
 
 if not six.PY3:
     import websocket
 
-DEFAULT_DOCKER_API_VERSION = '1.8'
+DEFAULT_DOCKER_API_VERSION = '1.9'
 DEFAULT_TIMEOUT_SECONDS = 60
 STREAM_HEADER_SIZE_BYTES = 8
 
 
-class APIError(requests.exceptions.HTTPError):
-    def __init__(self, message, response, explanation=None):
-        # requests 1.2 supports response as a keyword argument, but
-        # requests 1.1 doesn't
-        super(APIError, self).__init__(message)
-        self.response = response
-
-        self.explanation = explanation
-
-        if self.explanation is None and response.content:
-            self.explanation = response.content.strip()
-
-    def __str__(self):
-        message = super(APIError, self).__str__()
-
-        if self.is_client_error():
-            message = '%s Client Error: %s' % (
-                self.response.status_code, self.response.reason)
-
-        elif self.is_server_error():
-            message = '%s Server Error: %s' % (
-                self.response.status_code, self.response.reason)
-
-        if self.explanation:
-            message = '%s ("%s")' % (message, self.explanation)
-
-        return message
-
-    def is_client_error(self):
-        return 400 <= self.response.status_code < 500
-
-    def is_server_error(self):
-        return 500 <= self.response.status_code < 600
-
-
 class Client(requests.Session):
     def __init__(self, base_url=None, version=DEFAULT_DOCKER_API_VERSION,
                  timeout=DEFAULT_TIMEOUT_SECONDS):
@@ -112,7 +79,7 @@
         try:
             response.raise_for_status()
         except requests.exceptions.HTTPError as e:
-            raise APIError(e, response, explanation=explanation)
+            raise errors.APIError(e, response, explanation=explanation)
 
     def _result(self, response, json=False, binary=False):
         assert not (json and binary)
@@ -129,7 +96,7 @@
                           mem_limit=0, ports=None, environment=None, dns=None,
                           volumes=None, volumes_from=None,
                           network_disabled=False, entrypoint=None,
-                          cpu_shares=None, working_dir=None):
+                          cpu_shares=None, working_dir=None, domainname=None):
         if isinstance(command, six.string_types):
             command = shlex.split(str(command))
         if isinstance(environment, dict):
@@ -155,6 +122,13 @@
                 volumes_dict[vol] = {}
             volumes = volumes_dict
 
+        if volumes_from:
+            if not isinstance(volumes_from, six.string_types):
+                volumes_from = ','.join(volumes_from)
+        else:
+            # Force None, an empty list or dict causes client.start to fail
+            volumes_from = None
+
         attach_stdin = False
         attach_stdout = False
         attach_stderr = False
@@ -168,27 +142,36 @@
                 attach_stdin = True
                 stdin_once = True
 
+        if utils.compare_version('1.10', self._version) >= 0:
+            message = ('{0!r} parameter has no effect on create_container().'
+                       ' It has been moved to start()')
+            if dns is not None:
+                raise errors.DockerException(message.format('dns'))
+            if volumes_from is not None:
+                raise errors.DockerException(message.format('volumes_from'))
+
         return {
-            'Hostname':     hostname,
+            'Hostname': hostname,
+            'Domainname': domainname,
             'ExposedPorts': ports,
-            'User':         user,
-            'Tty':          tty,
-            'OpenStdin':    stdin_open,
-            'StdinOnce':    stdin_once,
-            'Memory':       mem_limit,
-            'AttachStdin':  attach_stdin,
+            'User': user,
+            'Tty': tty,
+            'OpenStdin': stdin_open,
+            'StdinOnce': stdin_once,
+            'Memory': mem_limit,
+            'AttachStdin': attach_stdin,
             'AttachStdout': attach_stdout,
             'AttachStderr': attach_stderr,
-            'Env':          environment,
-            'Cmd':          command,
-            'Dns':          dns,
-            'Image':        image,
-            'Volumes':      volumes,
-            'VolumesFrom':  volumes_from,
+            'Env': environment,
+            'Cmd': command,
+            'Dns': dns,
+            'Image': image,
+            'Volumes': volumes,
+            'VolumesFrom': volumes_from,
             'NetworkDisabled': network_disabled,
-            'Entrypoint':   entrypoint,
-            'CpuShares':    cpu_shares,
-            'WorkingDir':    working_dir
+            'Entrypoint': entrypoint,
+            'CpuShares': cpu_shares,
+            'WorkingDir': working_dir
         }
 
     def _post_json(self, url, data, **kwargs):
@@ -226,31 +209,18 @@
     def _create_websocket_connection(self, url):
         return websocket.create_connection(url)
 
-    def _stream_result(self, response):
-        """Generator for straight-out, non chunked-encoded HTTP responses."""
+    def _get_raw_response_socket(self, response):
         self._raise_for_status(response)
-        for line in response.iter_lines(chunk_size=1, decode_unicode=True):
-            # filter out keep-alive new lines
-            if line:
-                yield line + '\n'
-
-    def _stream_result_socket(self, response):
-        self._raise_for_status(response)
-        return response.raw._fp.fp._sock
+        if six.PY3:
+            return response.raw._fp.fp.raw._sock
+        else:
+            return response.raw._fp.fp._sock
 
     def _stream_helper(self, response):
         """Generator for data coming from a chunked-encoded HTTP response."""
-        socket_fp = self._stream_result_socket(response)
-        socket_fp.setblocking(1)
-        socket = socket_fp.makefile()
-        while True:
-            size = int(socket.readline(), 16)
-            if size <= 0:
-                break
-            data = socket.readline()
-            if not data:
-                break
-            yield data
+        for line in response.iter_lines(chunk_size=32):
+            if line:
+                yield line
 
     def _multiplexed_buffer_helper(self, response):
         """A generator of multiplexed data blocks read from a buffered
@@ -269,17 +239,20 @@
     def _multiplexed_socket_stream_helper(self, response):
         """A generator of multiplexed data blocks coming from a response
         socket."""
-        socket = self._stream_result_socket(response)
+        socket = self._get_raw_response_socket(response)
 
         def recvall(socket, size):
-            data = ''
+            blocks = []
             while size > 0:
                 block = socket.recv(size)
                 if not block:
                     return None
 
-                data += block
+                blocks.append(block)
                 size -= len(block)
+
+            sep = bytes() if six.PY3 else str()
+            data = sep.join(blocks)
             return data
 
         while True:
@@ -308,9 +281,18 @@
         u = self._url("/containers/{0}/attach".format(container))
         response = self._post(u, params=params, stream=stream)
 
-        # Stream multi-plexing was introduced in API v1.6.
+        # Stream multi-plexing was only introduced in API v1.6. Anything before
+        # that needs old-style streaming.
         if utils.compare_version('1.6', self._version) < 0:
-            return stream and self._stream_result(response) or \
+            def stream_result():
+                self._raise_for_status(response)
+                for line in response.iter_lines(chunk_size=1,
+                                                decode_unicode=True):
+                    # filter out keep-alive new lines
+                    if line:
+                        yield line
+
+            return stream_result() if stream else \
                 self._result(response, binary=True)
 
         return stream and self._multiplexed_socket_stream_helper(response) or \
@@ -323,28 +305,39 @@
                 'stderr': 1,
                 'stream': 1
             }
+
         if ws:
             return self._attach_websocket(container, params)
 
         if isinstance(container, dict):
             container = container.get('Id')
+
         u = self._url("/containers/{0}/attach".format(container))
-        return self._stream_result_socket(self.post(
+        return self._get_raw_response_socket(self.post(
             u, None, params=self._attach_params(params), stream=True))
 
     def build(self, path=None, tag=None, quiet=False, fileobj=None,
-              nocache=False, rm=False, stream=False, timeout=None):
+              nocache=False, rm=False, stream=False, timeout=None,
+              custom_context=False, encoding=None):
         remote = context = headers = None
         if path is None and fileobj is None:
-            raise Exception("Either path or fileobj needs to be provided.")
+            raise TypeError("Either path or fileobj needs to be provided.")
 
-        if fileobj is not None:
+        if custom_context:
+            if not fileobj:
+                raise TypeError("You must specify fileobj with custom_context")
+            context = fileobj
+        elif fileobj is not None:
             context = utils.mkbuildcontext(fileobj)
-        elif path.startswith(('http://', 'https://', 'git://', 'github.com/')):
+        elif path.startswith(('http://', 'https://',
+                              'git://', 'github.com/')):
             remote = path
         else:
             context = utils.tar(path)
 
+        if utils.compare_version('1.8', self._version) >= 0:
+            stream = True
+
         u = self._url('/build')
         params = {
             't': tag,
@@ -353,8 +346,24 @@
             'nocache': nocache,
             'rm': rm
         }
+
         if context is not None:
             headers = {'Content-Type': 'application/tar'}
+            if encoding:
+                headers['Content-Encoding'] = encoding
+
+        if utils.compare_version('1.9', self._version) >= 0:
+            # If we don't have any auth data so far, try reloading the config
+            # file one more time in case anything showed up in there.
+            if not self._auth_configs:
+                self._auth_configs = auth.load_config()
+
+            # Send the full auth configuration (if any exists), since the build
+            # could use any (or all) of the registries.
+            if self._auth_configs:
+                headers['X-Registry-Config'] = auth.encode_full_header(
+                    self._auth_configs
+                )
 
         response = self._post(
             u,
@@ -367,8 +376,9 @@
 
         if context is not None:
             context.close()
-        if stream or utils.compare_version('1.8', self._version) >= 0:
-            return self._stream_result(response)
+
+        if stream:
+            return self._stream_helper(response)
         else:
             output = self._result(response)
             srch = r'Successfully built ([0-9a-f]+)'
@@ -422,12 +432,12 @@
                          mem_limit=0, ports=None, environment=None, dns=None,
                          volumes=None, volumes_from=None,
                          network_disabled=False, name=None, entrypoint=None,
-                         cpu_shares=None, working_dir=None):
+                         cpu_shares=None, working_dir=None, domainname=None):
 
         config = self._container_config(
             image, command, hostname, user, detach, stdin_open, tty, mem_limit,
             ports, environment, dns, volumes, volumes_from, network_disabled,
-            entrypoint, cpu_shares, working_dir
+            entrypoint, cpu_shares, working_dir, domainname
         )
         return self.create_container_from_config(config, name)
 
@@ -446,21 +456,7 @@
                             format(container))), True)
 
     def events(self):
-        u = self._url("/events")
-
-        socket = self._stream_result_socket(self.get(u, stream=True))
-
-        while True:
-            chunk = socket.recv(4096)
-            if chunk:
-                # Messages come in the format of length, data, newline.
-                length, data = chunk.split("\n", 1)
-                length = int(length, 16)
-                if length > len(data):
-                    data += socket.recv(length - len(data))
-                yield json.loads(data)
-            else:
-                break
+        return self._stream_helper(self.get(self._url('/events'), stream=True))
 
     def export(self, container):
         if isinstance(container, dict):
@@ -626,7 +622,7 @@
                 self._auth_configs = auth.load_config()
             authcfg = auth.resolve_authconfig(self._auth_configs, registry)
 
-            # Do not fail here if no atuhentication exists for this specific
+            # Do not fail here if no authentication exists for this specific
             # registry as we can have a readonly pull. Just put the header if
             # we can.
             if authcfg:
@@ -652,7 +648,7 @@
                 self._auth_configs = auth.load_config()
             authcfg = auth.resolve_authconfig(self._auth_configs, registry)
 
-            # Do not fail here if no atuhentication exists for this specific
+            # Do not fail here if no authentication exists for this specific
             # registry as we can have a readonly pull. Just put the header if
             # we can.
             if authcfg:
@@ -660,7 +656,7 @@
 
             response = self._post_json(u, None, headers=headers, stream=stream)
         else:
-            response = self._post_json(u, authcfg, stream=stream)
+            response = self._post_json(u, None, stream=stream)
 
         return stream and self._stream_helper(response) \
             or self._result(response)
@@ -691,7 +687,8 @@
                             True)
 
     def start(self, container, binds=None, port_bindings=None, lxc_conf=None,
-              publish_all_ports=False, links=None, privileged=False):
+              publish_all_ports=False, links=None, privileged=False,
+              dns=None, volumes_from=None):
         if isinstance(container, dict):
             container = container.get('Id')
 
@@ -706,8 +703,12 @@
         }
         if binds:
             bind_pairs = [
-                '{0}:{1}'.format(host, dest) for host, dest in binds.items()
+                '%s:%s:%s' % (
+                    h, d['bind'],
+                    'ro' if 'ro' in d and d['ro'] else 'rw'
+                ) for h, d in binds.items()
             ]
+
             start_config['Binds'] = bind_pairs
 
         if port_bindings:
@@ -729,6 +730,25 @@
 
         start_config['Privileged'] = privileged
 
+        if utils.compare_version('1.10', self._version) >= 0:
+            if dns is not None:
+                start_config['Dns'] = dns
+            if volumes_from is not None:
+                if isinstance(volumes_from, six.string_types):
+                    volumes_from = volumes_from.split(',')
+                start_config['VolumesFrom'] = volumes_from
+        else:
+            warning_message = ('{0!r} parameter is discarded. It is only'
+                               ' available for API version greater or equal'
+                               ' than 1.10')
+
+            if dns is not None:
+                warnings.warn(warning_message.format('dns'),
+                              DeprecationWarning)
+            if volumes_from is not None:
+                warnings.warn(warning_message.format('volumes_from'),
+                              DeprecationWarning)
+
         url = self._url("/containers/{0}/start".format(container))
         res = self._post_json(url, data=start_config)
         self._raise_for_status(res)
@@ -770,3 +790,8 @@
         if 'StatusCode' in json_:
             return json_['StatusCode']
         return -1
+
+    # TODO(gloom): add to public docker-py source.
+    def load_image(self, path_to_tar):
+        u = self._url("/images/load")
+        return self._result(self._post(u, data=open(path_to_tar, 'rb').read()))
diff --git a/lib/docker/docker/errors.py b/lib/docker/docker/errors.py
new file mode 100644
index 0000000..9aad700
--- /dev/null
+++ b/lib/docker/docker/errors.py
@@ -0,0 +1,61 @@
+#    Copyright 2014 dotCloud inc.
+#    Licensed under the Apache License, Version 2.0 (the "License");
+#    you may not use this file except in compliance with the License.
+#    You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+
+import requests
+
+
+class APIError(requests.exceptions.HTTPError):
+    def __init__(self, message, response, explanation=None):
+        # requests 1.2 supports response as a keyword argument, but
+        # requests 1.1 doesn't
+        super(APIError, self).__init__(message)
+        self.response = response
+
+        self.explanation = explanation
+
+        if self.explanation is None and response.content:
+            self.explanation = response.content.strip()
+
+    def __str__(self):
+        message = super(APIError, self).__str__()
+
+        if self.is_client_error():
+            message = '%s Client Error: %s' % (
+                self.response.status_code, self.response.reason)
+
+        elif self.is_server_error():
+            message = '%s Server Error: %s' % (
+                self.response.status_code, self.response.reason)
+
+        if self.explanation:
+            message = '%s ("%s")' % (message, self.explanation)
+
+        return message
+
+    def is_client_error(self):
+        return 400 <= self.response.status_code < 500
+
+    def is_server_error(self):
+        return 500 <= self.response.status_code < 600
+
+
+class DockerException(Exception):
+    pass
+
+
+class InvalidRepository(DockerException):
+    pass
+
+
+class InvalidConfigFile(DockerException):
+    pass
diff --git a/lib/docker/docker/unixconn/unixconn.py b/lib/docker/docker/unixconn/unixconn.py
index 28068f3..3d3f7bc 100644
--- a/lib/docker/docker/unixconn/unixconn.py
+++ b/lib/docker/docker/unixconn/unixconn.py
@@ -40,7 +40,7 @@
         self.sock = sock
 
     def _extract_path(self, url):
-        #remove the base_url entirely..
+        # remove the base_url entirely..
         return url.replace(self.base_url, "")
 
     def request(self, method, url, **kwargs):
diff --git a/lib/docker/docker/utils/__init__.py b/lib/docker/docker/utils/__init__.py
index 386a01a..8a85975 100644
--- a/lib/docker/docker/utils/__init__.py
+++ b/lib/docker/docker/utils/__init__.py
@@ -1,3 +1,3 @@
 from .utils import (
-    compare_version, convert_port_bindings, mkbuildcontext, ping, tar
+    compare_version, convert_port_bindings, mkbuildcontext, ping, tar, parse_repository_tag
 ) # flake8: noqa
diff --git a/lib/docker/docker/utils/utils.py b/lib/docker/docker/utils/utils.py
index 2b53439..9edd71a 100644
--- a/lib/docker/docker/utils/utils.py
+++ b/lib/docker/docker/utils/utils.py
@@ -15,6 +15,7 @@
 import io
 import tarfile
 import tempfile
+from distutils.version import StrictVersion
 
 import requests
 import six
@@ -51,7 +52,25 @@
 
 
 def compare_version(v1, v2):
-    return float(v2) - float(v1)
+    """Compare docker versions
+
+    >>> v1 = '1.9'
+    >>> v2 = '1.10'
+    >>> compare_version(v1, v2)
+    1
+    >>> compare_version(v2, v1)
+    -1
+    >>> compare_version(v2, v2)
+    0
+    """
+    s1 = StrictVersion(v1)
+    s2 = StrictVersion(v2)
+    if s1 == s2:
+        return 0
+    elif s1 > s2:
+        return -1
+    else:
+        return 1
 
 
 def ping(url):
@@ -95,3 +114,15 @@
         else:
             result[key] = [_convert_port_binding(v)]
     return result
+
+
+def parse_repository_tag(repo):
+    column_index = repo.rfind(':')
+    if column_index < 0:
+        return repo, ""
+    tag = repo[column_index+1:]
+    slash_index = tag.find('/')
+    if slash_index < 0:
+        return repo[:column_index], tag
+
+    return repo, ""
diff --git a/lib/protorpc-1.0/protorpc/messages.py b/lib/protorpc-1.0/protorpc/messages.py
index f81ecc3..f226da4 100644
--- a/lib/protorpc-1.0/protorpc/messages.py
+++ b/lib/protorpc-1.0/protorpc/messages.py
@@ -866,7 +866,10 @@
       if name not in message_type.__by_name:
         raise AttributeError('Message %s has no field %s' % (
             message_type.__name__, name))
-    self.__tags.pop(field.number, None)
+    if field.repeated:
+      self.__tags[field.number] = FieldList(field, [])
+    else:
+      self.__tags.pop(field.number, None)
 
   def all_unrecognized_fields(self):
     """Get the names of all unrecognized fields in this message."""
diff --git a/php/sdk/google/appengine/datastore/datastore_v4_pb.php b/php/sdk/google/appengine/datastore/datastore_v4_pb.php
index e05d911..bfa1391 100644
--- a/php/sdk/google/appengine/datastore/datastore_v4_pb.php
+++ b/php/sdk/google/appengine/datastore/datastore_v4_pb.php
@@ -2075,12 +2075,34 @@
     public function hasSkippedResults() {
       return isset($this->skipped_results);
     }
+    public function getSnapshotVersion() {
+      if (!isset($this->snapshot_version)) {
+        return "0";
+      }
+      return $this->snapshot_version;
+    }
+    public function setSnapshotVersion($val) {
+      if (is_double($val)) {
+        $this->snapshot_version = sprintf('%0.0F', $val);
+      } else {
+        $this->snapshot_version = $val;
+      }
+      return $this;
+    }
+    public function clearSnapshotVersion() {
+      unset($this->snapshot_version);
+      return $this;
+    }
+    public function hasSnapshotVersion() {
+      return isset($this->snapshot_version);
+    }
     public function clear() {
       $this->clearEntityResultType();
       $this->clearEntityResult();
       $this->clearEndCursor();
       $this->clearMoreResults();
       $this->clearSkippedResults();
+      $this->clearSnapshotVersion();
     }
     public function byteSizePartial() {
       $res = 0;
@@ -2105,6 +2127,10 @@
         $res += 1;
         $res += $this->lengthVarInt64($this->skipped_results);
       }
+      if (isset($this->snapshot_version)) {
+        $res += 1;
+        $res += $this->lengthVarInt64($this->snapshot_version);
+      }
       return $res;
     }
     public function outputPartial($out) {
@@ -2130,6 +2156,10 @@
         $out->putVarInt32(48);
         $out->putVarInt32($this->skipped_results);
       }
+      if (isset($this->snapshot_version)) {
+        $out->putVarInt32(56);
+        $out->putVarInt64($this->snapshot_version);
+      }
     }
     public function tryMerge($d) {
       while($d->avail() > 0) {
@@ -2155,6 +2185,9 @@
           case 48:
             $this->setSkippedResults($d->getVarInt32());
             break;
+          case 56:
+            $this->setSnapshotVersion($d->getVarInt64());
+            break;
           case 0:
             throw new \google\net\ProtocolBufferDecodeError();
             break;
@@ -2188,6 +2221,9 @@
       if ($x->hasSkippedResults()) {
         $this->setSkippedResults($x->getSkippedResults());
       }
+      if ($x->hasSnapshotVersion()) {
+        $this->setSnapshotVersion($x->getSnapshotVersion());
+      }
     }
     public function equals($x) {
       if ($x === $this) { return true; }
@@ -2203,6 +2239,8 @@
       if (isset($this->more_results) && $this->more_results !== $x->more_results) return false;
       if (isset($this->skipped_results) !== isset($x->skipped_results)) return false;
       if (isset($this->skipped_results) && !$this->integerEquals($this->skipped_results, $x->skipped_results)) return false;
+      if (isset($this->snapshot_version) !== isset($x->snapshot_version)) return false;
+      if (isset($this->snapshot_version) && !$this->integerEquals($this->snapshot_version, $x->snapshot_version)) return false;
       return true;
     }
     public function shortDebugString($prefix = "") {
@@ -2222,12 +2260,16 @@
       if (isset($this->skipped_results)) {
         $res .= $prefix . "skipped_results: " . $this->debugFormatInt32($this->skipped_results) . "\n";
       }
+      if (isset($this->snapshot_version)) {
+        $res .= $prefix . "snapshot_version: " . $this->debugFormatInt64($this->snapshot_version) . "\n";
+      }
       return $res;
     }
   }
 }
 namespace google\appengine\datastore\v4\Mutation {
   class Operation {
+    const UNKNOWN = 0;
     const INSERT = 1;
     const UPDATE = 2;
     const UPSERT = 3;
@@ -2238,7 +2280,7 @@
   class Mutation extends \google\net\ProtocolMessage {
     public function getOp() {
       if (!isset($this->op)) {
-        return 1;
+        return 0;
       }
       return $this->op;
     }
@@ -2362,7 +2404,6 @@
       };
     }
     public function checkInitialized() {
-      if (!isset($this->op)) return 'op';
       if (isset($this->key) && (!$this->key->isInitialized())) return 'key';
       if (isset($this->entity) && (!$this->entity->isInitialized())) return 'entity';
       return null;
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php
index c9e019e..969d331 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageReadClient.php
@@ -133,10 +133,21 @@
   }
 
   /**
-   * Seek within the current file. We only deal with SEEK_SET which we expect
-   * the upper layers of PHP to convert and SEEK_CUR or SEEK_END calls to.
+   * Seek within the current file. We expect the upper layers of PHP to convert
+   * SEEK_CUR to SEEK_SET.
    */
   public function seek($offset, $whence) {
+    if ($whence == SEEK_END) {
+      if (isset($this->object_total_length)) {
+        $whence = SEEK_SET;
+        $offset = $this->object_total_length + $offset;
+      } else {
+        trigger_error("Unable to seek from end for objects with unknown size",
+                      E_USER_WARNING);
+        return false;
+      }
+    }
+
     if ($whence != SEEK_SET) {
       trigger_error(sprintf("Unsupported seek mode: %d", $whence),
                     E_USER_WARNING);
@@ -144,7 +155,7 @@
     }
     // If we know the size, then make sure they are only seeking within it.
     if (isset($this->object_total_length) &&
-        $offset > $this->object_total_length) {
+        $offset >= $this->object_total_length) {
       return false;
     }
     if ($offset < 0) {
@@ -156,7 +167,8 @@
     // Check if we can seek inside the current buffer
     $buffer_end = $this->object_block_start_position +
                   strlen($this->read_buffer);
-    if ($this->object_block_start_position <= $offset && $offset < $buffer_end) {
+    if ($this->object_block_start_position <= $offset &&
+        $offset < $buffer_end) {
       $this->buffer_read_position = $offset -
           $this->object_block_start_position;
     } else {
diff --git a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php
index 5d0953c..c4a2391 100644
--- a/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php
+++ b/php/sdk/google/appengine/ext/cloud_storage_streams/CloudStorageStreamWrapperTest.php
@@ -510,7 +510,7 @@
     $this->apiProxyMock->verify();
   }
 
-  public function testSeekReadObjectSuccess() {
+  public function testSeekSetReadObjectSuccess() {
     $body = "Hello from PHP";
 
     $this->expectFileReadRequest($body,
@@ -528,6 +528,37 @@
     $this->apiProxyMock->verify();
   }
 
+  public function testSeekCurReadObjectSuccess() {
+    $body = "Hello world!";
+
+    $this->expectFileReadRequest($body,
+                                 0,
+                                 CloudStorageReadClient::DEFAULT_READ_SIZE,
+                                 null);
+
+    $valid_path = "gs://bucket/object_name.png";
+    $fp = fopen($valid_path, "r");
+    $this->assertEquals(0, fseek($fp, 2, SEEK_SET));
+    $this->assertEquals(0, fseek($fp, 4, SEEK_CUR));
+    $this->assertEquals('world!', fread($fp, 6));
+    $this->apiProxyMock->verify();
+  }
+
+  public function testSeekEndReadObjectSuccess() {
+    $body = "Hello world!";
+
+    $this->expectFileReadRequest($body,
+                                 0,
+                                 CloudStorageReadClient::DEFAULT_READ_SIZE,
+                                 null);
+
+    $valid_path = "gs://bucket/object_name.png";
+    $fp = fopen($valid_path, "r");
+    $this->assertEquals(0, fseek($fp, -6, SEEK_END));
+    $this->assertEquals('world!', fread($fp, 6));
+    $this->apiProxyMock->verify();
+  }
+
   public function testReadZeroSizedObjectSuccess() {
     $this->expectFileReadRequest("",
                                  0,
diff --git a/wrapper_util.py b/wrapper_util.py
index 44d6bb9..7ba646a 100644
--- a/wrapper_util.py
+++ b/wrapper_util.py
@@ -201,6 +201,7 @@
         os.path.join(dir_path, 'lib', 'concurrent'),
         os.path.join(dir_path, 'lib', 'cherrypy'),
 
+        os.path.join(dir_path, 'lib', 'distutils'),
         os.path.join(dir_path, 'lib', 'requests'),
         os.path.join(dir_path, 'lib', 'six'),
         os.path.join(dir_path, 'lib', 'websocket'),